Skip to content

Commit

Permalink
Add functional test for /conversation/custom endpoint
Browse files Browse the repository at this point in the history
- Allow the disabling of app insights and loading
config from blob storage to make testing easier
- Switch to `Thread` from `Process` for running the app
  - This is to stop pytest calling the session-scoped fixture multiple times
  - It also fixes an issue where we were not seeing app logs
- Setup the mock server as https due to the azure libraries enforcing it
  - This introduced additional fixtures and the `trustme` dependency
- Commit the encoding file for tiktoken
  - This is to stop it making an HTTP call over the internet to retrieve it

Required by #420
  • Loading branch information
adamdougal committed Mar 20, 2024
1 parent 286fe62 commit 7efab26
Show file tree
Hide file tree
Showing 11 changed files with 100,613 additions and 57 deletions.
20 changes: 13 additions & 7 deletions code/backend/batch/utilities/helpers/ConfigHelper.py
Original file line number Diff line number Diff line change
Expand Up @@ -70,13 +70,19 @@ def __init__(self, logging: dict):
class ConfigHelper:
@staticmethod
def get_active_config_or_default():
    """Return the active config, falling back to the default config.

    The default config is always built first; blob storage is consulted
    only when LOAD_CONFIG_FROM_BLOB_STORAGE is enabled (this allows tests
    to run without any storage dependency). Any failure downloading or
    parsing ``active.json`` leaves the default config in place.
    """
    env_helper = EnvHelper()
    config = ConfigHelper.get_default_config()

    if env_helper.LOAD_CONFIG_FROM_BLOB_STORAGE:
        try:
            blob_client = AzureBlobStorageClient(
                container_name=CONFIG_CONTAINER_NAME
            )
            config_file = blob_client.download_file("active.json")
            config = Config(json.loads(config_file))
        except Exception:
            # Best-effort: keep the default config built above.
            print("Returning default config")

    return config

@staticmethod
Expand Down
8 changes: 8 additions & 0 deletions code/backend/batch/utilities/helpers/EnvHelper.py
Original file line number Diff line number Diff line change
Expand Up @@ -144,6 +144,10 @@ def __init__(self, **kwargs) -> None:
else os.getenv("AZURE_FORM_RECOGNIZER_KEY", "")
)
# Azure App Insights
self.APPINSIGHTS_ENABLED = (
os.getenv("APPINSIGHTS_ENABLED", "True").lower() == "true"
)

self.APPINSIGHTS_CONNECTION_STRING = os.getenv(
"APPINSIGHTS_CONNECTION_STRING", ""
)
Expand All @@ -170,6 +174,10 @@ def __init__(self, **kwargs) -> None:
# Speech Service
self.AZURE_SPEECH_SERVICE_REGION = os.getenv("AZURE_SPEECH_SERVICE_REGION")

self.LOAD_CONFIG_FROM_BLOB_STORAGE = (
os.getenv("LOAD_CONFIG_FROM_BLOB_STORAGE", "True").lower() == "true"
)

def should_use_data(self) -> bool:
if (
self.AZURE_SEARCH_SERVICE
Expand Down
9 changes: 6 additions & 3 deletions code/backend/batch/utilities/loggers/TokenLogger.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,12 @@ class TokenLogger:
def __init__(self, name: str = __name__):
    """Create a logger that ships token metrics to Application Insights.

    The App Insights handler is attached only when APPINSIGHTS_ENABLED is
    set, so tests can run without a connection string or network access.
    """
    env_helper: EnvHelper = EnvHelper()
    self.logger = logging.getLogger(name)
    if env_helper.APPINSIGHTS_ENABLED:
        self.logger.addHandler(
            AzureLogHandler(
                connection_string=env_helper.APPINSIGHTS_CONNECTION_STRING
            )
        )
    self.logger.setLevel(logging.INFO)

def get_logger(self):
Expand Down
6 changes: 6 additions & 0 deletions code/tests/functional/backend_api/app_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,6 +6,12 @@ class AppConfig:
config: Dict[str, Any] = {
"AZURE_SPEECH_SERVICE_KEY": "some-azure-speech-service-key",
"AZURE_SPEECH_SERVICE_REGION": "some-azure-speech-service-region",
"APPINSIGHTS_ENABLED": "False",
"AZURE_OPENAI_API_KEY": "some-azure-openai-api-key",
"AZURE_SEARCH_KEY": "some-azure-search-key",
"AZURE_OPENAI_EMBEDDING_MODEL": "some-embedding-model",
"AZURE_OPENAI_MODEL": "some-openai-model",
"LOAD_CONFIG_FROM_BLOB_STORAGE": "false",
}

def __init__(self, config_overrides: Dict[str, Any] = {}) -> None:
Expand Down
178 changes: 140 additions & 38 deletions code/tests/functional/backend_api/conftest.py
Original file line number Diff line number Diff line change
@@ -1,69 +1,171 @@
from multiprocessing import Process
import socket
import ssl
import threading
import time
import pytest
from pytest_httpserver import HTTPServer
import requests
from app import app
from tests.functional.backend_api.app_config import AppConfig
from threading import Thread
import trustme
import importlib
from app import app as flask_app
import app


@pytest.fixture(scope="session")
def ca():
    """Session-wide test certificate authority used to issue localhost certs."""
    return trustme.CA()


@pytest.fixture(scope="session")
def httpserver_ssl_context(ca):
    """Server-side TLS context whose localhost cert is issued by the test CA."""
    server_context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    ca.issue_cert("localhost").configure_cert(server_context)
    return server_context


@pytest.fixture(scope="session")
def httpclient_ssl_context(ca):
    """Client-side TLS context that trusts certificates issued by the test CA."""
    # The CA PEM only needs to exist while the context is built: ssl loads
    # the file eagerly, so the temp file may be deleted after the return.
    with ca.cert_pem.tempfile() as ca_temp_path:
        return ssl.create_default_context(cafile=ca_temp_path)


@pytest.fixture(scope="session")
def app_port() -> int:
    """Pick a free TCP port once per session for the app under test."""
    print("Getting free port")
    # get_free_port is defined elsewhere in this file (outside this view).
    return get_free_port()


@pytest.fixture(scope="session")
def app_url(app_port: int) -> str:
    """Base URL (plain HTTP) of the app under test."""
    # Return annotation fixed (-> str): the fixture yields a URL string.
    return f"http://localhost:{app_port}"


@pytest.fixture(scope="module")
def mock_httpserver(make_httpserver):
    """
    This is required as the default pytest httpserver fixture is scoped at the function level
    """
    # NOTE(review): this module-scoped wrapper appears superseded by the
    # session-scoped fixtures elsewhere in this file — confirm it is still
    # referenced before keeping it.
    print("Starting HTTP Mock Server")
    server = make_httpserver
    yield server
    print("Stopping HTTP Mock Server")
    server.clear()


@pytest.fixture(scope="session")
def app_config(make_httpserver, ca):
    """App configuration pointing every Azure endpoint at the local mock server.

    The endpoints use https because the azure client libraries enforce it;
    the CA bundle is exposed through SSL_CERT_FILE / CURL_CA_BUNDLE so the
    clients trust the mock server's certificate.
    """
    print("Creating APP CONFIG")
    # Keep the temp CA file alive for the whole session by yielding inside
    # the with-block.
    with ca.cert_pem.tempfile() as ca_temp_path:
        app_config = AppConfig(
            {
                "AZURE_OPENAI_ENDPOINT": f"https://localhost:{make_httpserver.port}",
                "AZURE_SEARCH_SERVICE": f"https://localhost:{make_httpserver.port}",
                "AZURE_CONTENT_SAFETY_ENDPOINT": f"https://localhost:{make_httpserver.port}",
                "SSL_CERT_FILE": ca_temp_path,
                "CURL_CA_BUNDLE": ca_temp_path,
                "TIKTOKEN_CACHE_DIR": "code/tests/functional/backend_api/resources",
            }
        )
        print(f"Created app config: {app_config.get_all()}")
        yield app_config


@pytest.fixture(scope="session", autouse=True)
def manage_app(app_port: int, app_config: AppConfig):
    """Start the app once per session with the test environment applied."""
    app_config.apply_to_environment()
    # The app runs in a daemon thread, so no explicit shutdown is required.
    start_app(app_port)
    yield
    app_config.remove_from_environment()


def start_app(port: int) -> Process:
    """Start the Flask app in a separate process and wait until it responds."""
    # NOTE(review): a Thread-based start_app is also defined later in this
    # file; two same-named top-level functions conflict — confirm which
    # definition should remain.
    print(f"Starting application on port {port}")
    proc = Process(target=app.run, kwargs={"port": port, "debug": True})
    proc.start()
    wait_for_app(port)
    print("Application started")
    return proc
@pytest.fixture(scope="function", autouse=True)
def setup_default_mocking(httpserver: HTTPServer, app_config: AppConfig):
    """Register default mock responses for the Azure services the app calls.

    Runs automatically for every test; after the test, httpserver.check()
    raises if any handler recorded an error (e.g. a malformed request).
    """
    # Azure OpenAI embeddings endpoint.
    httpserver.expect_request(
        f"/openai/deployments/{app_config.get('AZURE_OPENAI_EMBEDDING_MODEL')}/embeddings",
        query_string="api-version=2023-12-01-preview",
        method="POST",
    ).respond_with_json(
        {
            "object": "list",
            "data": [
                {
                    "object": "embedding",
                    "embedding": [0.018990106880664825, -0.0073809814639389515],
                    "index": 0,
                }
            ],
            "model": "text-embedding-ada-002",
        }
    )

    # Azure AI Search: index existence check.
    httpserver.expect_request(
        "/indexes('conversations')",
        query_string="api-version=2023-11-01",
        method="GET",
    ).respond_with_json({})

    # Azure AI Content Safety: text analysis with no matches.
    httpserver.expect_request(
        "/contentsafety/text:analyze",
        query_string="api-version=2023-10-01",
        method="POST",
    ).respond_with_json(
        {
            "blocklistsMatch": [],
            "categoriesAnalysis": [],
        }
    )

    # Azure OpenAI chat completions endpoint.
    httpserver.expect_request(
        f"/openai/deployments/{app_config.get('AZURE_OPENAI_MODEL')}/chat/completions",
        query_string="api-version=2023-12-01-preview",
        method="POST",
    ).respond_with_json(
        {
            "id": "chatcmpl-6v7mkQj980V1yBec6ETrKPRqFjNw9",
            "object": "chat.completion",
            "created": 1679072642,
            "model": "gpt-35-turbo",
            "usage": {
                "prompt_tokens": 58,
                "completion_tokens": 68,
                "total_tokens": 126,
            },
            "choices": [
                {
                    "message": {
                        "role": "assistant",
                        "content": "42 is the meaning of life",
                    },
                    "finish_reason": "stop",
                    "index": 0,
                }
            ],
        }
    )

    # Azure AI Search: document indexing.
    httpserver.expect_request(
        "/indexes('conversations')/docs/search.index",
        query_string="api-version=2023-11-01",
        method="POST",
    ).respond_with_json(
        {
            "value": [
                {"key": "1", "status": True, "errorMessage": None, "statusCode": 201}
            ]
        }
    )

    yield

    # Fail the test if any registered expectation saw a bad request.
    httpserver.check()


def stop_app(proc: Process):
    """Terminate the app process and block until it has fully exited."""
    print("Shutting down application")
    proc.terminate()
    proc.join()  # Wait until the process is fully shut down
    print("Application shut down")
def start_app(app_port: int) -> Thread:
    """Run the Flask app in a background daemon thread and wait until it is reachable."""
    print(f"Starting application on port {app_port}")
    # ensure app is reloaded now that new environment variables are set
    importlib.reload(app)
    server_thread = threading.Thread(
        target=lambda: flask_app.run(port=app_port), daemon=True
    )
    server_thread.start()
    wait_for_app(app_port)
    print("Application started")
    return server_thread


def wait_for_app(port: int):
def wait_for_app(port: int, initial_check_delay: int = 10):
attempts = 0

time.sleep(initial_check_delay)
while attempts < 10:
try:
response = requests.get(f"http:https://localhost:{port}/api/config")
Expand Down

0 comments on commit 7efab26

Please sign in to comment.