Skip to content

Commit

Permalink
Add debug logging and fix DLAC (#387)
Browse files Browse the repository at this point in the history
Co-authored-by: Sarah Widder <[email protected]>
  • Loading branch information
sarah-widder and sarah-widder committed Nov 14, 2023
1 parent 7d624a3 commit 60e6b3e
Show file tree
Hide file tree
Showing 2 changed files with 44 additions and 5 deletions.
7 changes: 7 additions & 0 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -117,6 +117,13 @@ const getUserInfoList = async () => {
## Common Customization Scenarios
Feel free to fork this repository and make your own modifications to the UX or backend logic. For example, you may want to change aspects of the chat display, or expose some of the settings in `app.py` in the UI for users to try out different behaviors.

### Debugging your deployed app
First, add an environment variable on the app service resource called "DEBUG". Set this to "true".

Next, enable logging on the app service. Go to "App Service logs" under Monitoring, and change Application logging to File System. Save the change.

Now, you should be able to see logs from your app by viewing "Log stream" under Monitoring.

### Updating the default chat logo and headers
The landing chat page logo and headers are specified in `frontend/src/pages/chat/Chat.tsx`:
```
Expand Down
42 changes: 37 additions & 5 deletions app.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
import logging
import requests
import openai
import copy
from azure.identity import DefaultAzureCredential
from flask import Flask, Response, request, jsonify, send_from_directory
from dotenv import load_dotenv
Expand All @@ -27,6 +28,12 @@ def favicon():
def assets(path):
return send_from_directory("static/assets", path)

# Debug settings
# Read the DEBUG environment variable (defaults to "false" when unset);
# any case-insensitive "true" enables verbose debug logging app-wide.
DEBUG = os.environ.get("DEBUG", "false")
# Normalized boolean flag checked throughout the app before emitting debug logs.
DEBUG_LOGGING = DEBUG.lower() == "true"
if DEBUG_LOGGING:
    # Route all loggers to DEBUG level so messages reach the App Service log stream.
    logging.basicConfig(level=logging.DEBUG)

# On Your Data Settings
DATASOURCE_TYPE = os.environ.get("DATASOURCE_TYPE", "AzureCognitiveSearch")
SEARCH_TOP_K = os.environ.get("SEARCH_TOP_K", 5)
Expand Down Expand Up @@ -119,9 +126,13 @@ def is_chat_model():

def should_use_data():
    """Return True when an "On Your Data" backend is fully configured.

    Checks, in order, whether Azure Cognitive Search or Azure CosmosDB
    Mongo vCore has all of its required settings present; returns False
    when neither backend is completely configured.
    """
    search_configured = (
        AZURE_SEARCH_SERVICE and AZURE_SEARCH_INDEX and AZURE_SEARCH_KEY
    )
    if search_configured:
        if DEBUG_LOGGING:
            logging.debug("Using Azure Cognitive Search")
        return True

    cosmos_configured = (
        AZURE_COSMOSDB_MONGO_VCORE_DATABASE
        and AZURE_COSMOSDB_MONGO_VCORE_CONTAINER
        and AZURE_COSMOSDB_MONGO_VCORE_INDEX
        and AZURE_COSMOSDB_MONGO_VCORE_CONNECTION_STRING
    )
    if cosmos_configured:
        if DEBUG_LOGGING:
            logging.debug("Using Azure CosmosDB Mongo vcore")
        return True

    return False
Expand All @@ -143,6 +154,8 @@ def fetchUserGroups(userToken, nextLink=None):
try :
r = requests.get(endpoint, headers=headers)
if r.status_code != 200:
if DEBUG_LOGGING:
logging.error(f"Error fetching user groups: {r.status_code} {r.text}")
return []

r = r.json()
Expand All @@ -152,6 +165,7 @@ def fetchUserGroups(userToken, nextLink=None):

return r['value']
except Exception as e:
logging.error(f"Exception in fetchUserGroups: {e}")
return []


Expand All @@ -160,11 +174,12 @@ def generateFilterString(userToken):
userGroups = fetchUserGroups(userToken)

# Construct filter string
if userGroups:
group_ids = ", ".join([obj['id'] for obj in userGroups])
return f"{AZURE_SEARCH_PERMITTED_GROUPS_COLUMN}/any(g:search.in(g, '{group_ids}'))"

return None
if not userGroups:
logging.debug("No user groups found")

group_ids = ", ".join([obj['id'] for obj in userGroups])
return f"{AZURE_SEARCH_PERMITTED_GROUPS_COLUMN}/any(g:search.in(g, '{group_ids}'))"



def prepare_body_headers_with_data(request):
Expand Down Expand Up @@ -193,7 +208,12 @@ def prepare_body_headers_with_data(request):
userToken = None
if AZURE_SEARCH_PERMITTED_GROUPS_COLUMN:
userToken = request.headers.get('X-MS-TOKEN-AAD-ACCESS-TOKEN', "")
if DEBUG_LOGGING:
logging.debug(f"USER TOKEN is {'present' if userToken else 'not present'}")

filter = generateFilterString(userToken)
if DEBUG_LOGGING:
logging.debug(f"FILTER: {filter}")

body["dataSources"].append(
{
Expand Down Expand Up @@ -254,6 +274,16 @@ def prepare_body_headers_with_data(request):
body["dataSources"][0]["parameters"]["embeddingEndpoint"] = AZURE_OPENAI_EMBEDDING_ENDPOINT
body["dataSources"][0]["parameters"]["embeddingKey"] = AZURE_OPENAI_EMBEDDING_KEY

if DEBUG_LOGGING:
body_clean = copy.deepcopy(body)
if body_clean["dataSources"][0]["parameters"].get("key"):
body_clean["dataSources"][0]["parameters"]["key"] = "*****"
if body_clean["dataSources"][0]["parameters"].get("connectionString"):
body_clean["dataSources"][0]["parameters"]["connectionString"] = "*****"
if body_clean["dataSources"][0]["parameters"].get("embeddingKey"):
body_clean["dataSources"][0]["parameters"]["embeddingKey"] = "*****"

logging.debug(f"REQUEST BODY: {json.dumps(body_clean, indent=4)}")

headers = {
'Content-Type': 'application/json',
Expand Down Expand Up @@ -304,6 +334,8 @@ def stream_with_data(body, headers, endpoint, history_metadata={}):
response["choices"][0]["messages"].append(lineJson["choices"][0]["messages"][0]["delta"])
yield format_as_ndjson(response)
elif role == "assistant":
if response['apim-request-id'] and DEBUG_LOGGING:
logging.debug(f"RESPONSE apim-request-id: {response['apim-request-id']}")
response["choices"][0]["messages"].append({
"role": "assistant",
"content": ""
Expand Down

0 comments on commit 60e6b3e

Please sign in to comment.