Merge pull request #21 from jhakulin/main
Conversation messages refactor
jhakulin authored May 20, 2024
2 parents 0bce3ff + 214e94d commit 13efc61
Showing 25 changed files with 1,003 additions and 485 deletions.
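The thread running through these diffs is that ConversationMessage now exposes optional text_message, file_message, and image_message parts instead of a type string that callers branch on. A minimal sketch of the new consumption pattern, using only the attributes visible in the hunks below; the render_messages helper and output_folder parameter are illustrative, not part of the commit:

from typing import List

from azure.ai.assistant.management.message import ConversationMessage


def render_messages(messages: List[ConversationMessage], output_folder: str) -> None:
    # Illustrative only: each message may carry any combination of text, file,
    # and image content, so every part is checked independently.
    for message in messages:
        if message.text_message:
            print(f"{message.sender} ({message.role}): {message.text_message.content}")
        if message.file_message:
            # Download the attached file next to the other run outputs.
            file_path = message.file_message.retrieve_file(output_folder)
            print(f"File saved: {file_path}")
        if message.image_message:
            image_path = message.image_message.retrieve_image(output_folder)
            print(f"Image saved: {image_path}")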
9 changes: 5 additions & 4 deletions gui/assistant_dialogs.py
@@ -743,9 +743,10 @@ def pre_load_assistant_config(self, name):
# Accessing code interpreter files from the tool resources
if self.assistant_config.tool_resources:
code_interpreter_files = self.assistant_config.tool_resources.code_interpreter_files
for file_path, file_id in code_interpreter_files.items():
self.code_interpreter_files[file_path] = file_id
self.codeFileList.addItem(f"{file_path}")
if code_interpreter_files:
for file_path, file_id in code_interpreter_files.items():
self.code_interpreter_files[file_path] = file_id
self.codeFileList.addItem(f"{file_path}")
self.codeInterpreterCheckBox.setChecked(self.assistant_config.code_interpreter)

for vector_store in self.assistant_config.tool_resources.file_search_vector_stores:
@@ -1018,7 +1019,7 @@ def export_assistant(self):
shutil.copyfile(user_functions_src, os.path.join(functions_path, "user_functions.py"))

# Read template, replace placeholder, and create main.py
template_path = os.path.join("templates", "main_template.py")
template_path = os.path.join("templates", "async_stream_template.py")
try:
with open(template_path, "r") as template_file:
template_content = template_file.read()
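The first hunk above guards against tool_resources.code_interpreter_files being None before iterating. A tiny standalone sketch of the same defensive pattern; the collect_code_interpreter_files helper is made up for illustration:

from typing import Dict, Optional


def collect_code_interpreter_files(code_interpreter_files: Optional[Dict[str, str]]) -> Dict[str, str]:
    files: Dict[str, str] = {}
    # The mapping is optional; skip iteration entirely when no files are attached.
    if code_interpreter_files:
        for file_path, file_id in code_interpreter_files.items():
            files[file_path] = file_id
    return files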
43 changes: 27 additions & 16 deletions gui/conversation.py
@@ -10,9 +10,10 @@

import html, os, re, subprocess, sys
import base64
from typing import List

from azure.ai.assistant.management.assistant_config_manager import AssistantConfigManager
from azure.ai.assistant.management.message import TextMessage, FileMessage, ImageMessage
from azure.ai.assistant.management.message import ConversationMessage
from azure.ai.assistant.management.logger_module import logger


@@ -210,29 +211,39 @@ def is_dark_mode(self):
return lightness < 127
return False

def append_messages(self, messages):
def append_messages(self, messages: List[ConversationMessage]):
self.text_to_url_map = {}

for message in reversed(messages):
if message.type == "text":
text_message : TextMessage = message
# Handle text message content
if message.text_message:
text_message = message.text_message
# Determine the color based on the role and the theme
if self.is_dark_mode():
# Colors for dark mode
color = 'blue' if text_message.role != "assistant" else '#D3D3D3'
color = 'blue' if message.role != "assistant" else '#D3D3D3'
else:
# Colors for light mode
color = 'blue' if text_message.role != "assistant" else 'black'
if text_message.role == "assistant":
self.append_message(text_message.sender, text_message.content, color=color)
else:
self.append_message(text_message.sender, text_message.content, color=color)
elif message.type == "file":
file_message : FileMessage = message
color = 'blue' if message.role != "assistant" else 'black'

# Append the formatted text message
self.append_message(message.sender, text_message.content, color=color)

# Handle file message content
if message.file_message:
file_message = message.file_message
# Synchronously retrieve and process the file
file_path = file_message.retrieve_file(self.file_path)
elif message.type == "image_file":
image_message : ImageMessage = message
file_path = image_message.retrieve_image(self.file_path)
self.append_image(file_path, image_message.sender)
if file_path:
self.append_message(message.sender, f"File saved: {file_path}", color='green')

# Handle image message content
if message.image_message:
image_message = message.image_message
# Synchronously retrieve and process the image
image_path = image_message.retrieve_image(self.file_path)
if image_path:
self.append_image(image_path, message.sender)

def convert_image_to_base64(self, image_path):
with open(image_path, "rb") as image_file:
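For context, the refactored append_messages is fed from retrieved conversations elsewhere in the GUI (see the main_window.py hunks below). A compressed sketch of that wiring; thread_client, conversation_view, and the 90-second timeout are stand-ins based on the surrounding code, not the app's exact names:

from typing import Any


def refresh_conversation_view(thread_client: Any, conversation_view: Any, thread_name: str) -> None:
    # Retrieve the thread's messages and let the view render the text, file,
    # and image parts of each ConversationMessage.
    conversation = thread_client.retrieve_conversation(thread_name, timeout=90.0)
    conversation_view.append_messages(conversation.messages)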
14 changes: 8 additions & 6 deletions gui/main_window.py
@@ -22,6 +22,7 @@
from azure.ai.assistant.management.conversation_thread_client import ConversationThreadClient
from azure.ai.assistant.management.function_config_manager import FunctionConfigManager
from azure.ai.assistant.management.logger_module import logger
from azure.ai.assistant.management.message import ConversationMessage
from gui.menu import AssistantsMenu, FunctionsMenu, TasksMenu, SettingsMenu, DiagnosticsMenu
from gui.status_bar import ActivityStatus, StatusBar
from gui.speech_input_handler import SpeechInputHandler
@@ -42,7 +43,7 @@ def __init__(self):
}
self.connection_timeout : float = 90.0
self.use_system_assistant_for_thread_name : bool = False
self.use_streaming_for_assistant : bool = False
self.use_streaming_for_assistant : bool = True
self.user_text_summarization_in_synthesis : bool = False
self.active_ai_client_type = None
self.in_background = False
@@ -453,7 +454,7 @@ def on_function_call_processed(self, assistant_name, run_identifier, function_na
assistant_name, run_identifier, function_name, arguments, response
)

def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message = False, message=None):
def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message = False, message : ConversationMessage = None):
logger.info(f"Run update for assistant {assistant_name} with run identifier {run_identifier}, status {run_status}, and thread name {thread_name}")

is_current_thread = self.conversation_thread_clients[self.active_ai_client_type].is_current_conversation_thread(thread_name)
@@ -462,8 +463,9 @@ def on_run_update(self, assistant_name, run_identifier, run_status, thread_name,
return

if run_status == "streaming":
logger.info(f"Run update for assistant {assistant_name} with run identifier {run_identifier} and status {run_status} is streaming")
self.conversation_append_chunk_signal.append_signal.emit(assistant_name, message, is_first_message)
if message.text_message:
logger.info(f"Run update for assistant {assistant_name} with run identifier {run_identifier} and status {run_status} is streaming")
self.conversation_append_chunk_signal.append_signal.emit(assistant_name, message.text_message.content, is_first_message)
return

conversation = self.conversation_thread_clients[self.active_ai_client_type].retrieve_conversation(thread_name, timeout=self.connection_timeout)
@@ -516,8 +518,8 @@ def on_run_end(self, assistant_name, run_identifier, run_end_time, thread_name):

# copy files from conversation to output folder at the end of the run
for message in conversation.messages:
if message.type == "file":
file_path = message.retrieve_file(assistant_config.output_folder_path)
if message.file_message:
file_path = message.file_message.retrieve_file(assistant_config.output_folder_path)
logger.debug(f"File downloaded to {file_path} on run end")

# Callbacks for TaskManagerCallbacks
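on_run_update now receives a ConversationMessage, and streaming chunks are only forwarded when text content is present. A condensed callbacks implementation following that pattern; the console output is illustrative, and the sync import path is assumed by symmetry with the async module shown further down:

from azure.ai.assistant.management.assistant_client_callbacks import AssistantClientCallbacks
from azure.ai.assistant.management.message import ConversationMessage


class ConsoleCallbacks(AssistantClientCallbacks):
    def on_run_update(self, assistant_name, run_identifier, run_status, thread_name,
                      is_first_message=False, message: ConversationMessage = None):
        # During streaming the message carries partial text content only.
        if run_status == "streaming" and message and message.text_message:
            print(message.text_message.content, end="", flush=True)

    def on_run_end(self, assistant_name, run_identifier, run_end_time, thread_name):
        print(f"\nRun {run_identifier} for {assistant_name} finished at {run_end_time}")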
2 changes: 1 addition & 1 deletion requirements.txt
@@ -1,5 +1,5 @@
# Assistant middleware from GitHub Release
https://github.com/Azure-Samples/azureai-assistant-tool/releases/download/v0.3.7-alpha/azure_ai_assistant-0.3.7a1-py3-none-any.whl
https://github.com/Azure-Samples/azureai-assistant-tool/releases/download/v0.4.0-alpha/azure_ai_assistant-0.4.0a1-py3-none-any.whl

# GUI Framework
PySide6
7 changes: 6 additions & 1 deletion samples/MultiAgentCodeOrchestration/main.py
@@ -8,6 +8,7 @@
from azure.ai.assistant.management.async_conversation_thread_client import AsyncConversationThreadClient
from azure.ai.assistant.management.async_task_manager import AsyncTaskManager, AsyncMultiTask
from azure.ai.assistant.management.async_task_manager_callbacks import AsyncTaskManagerCallbacks
from azure.ai.assistant.management.assistant_config_manager import AssistantConfigManager

from typing import Dict, List
import json, re
@@ -74,7 +75,7 @@ async def on_run_end(self, assistant_name, run_identifier, run_end_time, thread_
else:
conversation = await self.conversation_thread_client.retrieve_conversation(thread_name)
message = conversation.get_last_text_message(assistant_name)
print(f"\n{message}")
print(f"\n{message.content}")
if assistant_name == "CodeProgrammerAgent":
# Extract the JSON code block from the response by using the FileCreatorAgent
await self._assistants["FileCreatorAgent"].process_messages(user_request=message.content)
@@ -153,6 +154,8 @@ def requires_user_confirmation(assistant_response: str):


async def main():
# Use the AssistantConfigManager to save the assistant configurations at the end of the session
assistant_config_manager = AssistantConfigManager.get_instance('config')
assistant_names = ["CodeProgrammerAgent", "CodeInspectionAgent", "TaskPlannerAgent", "FileCreatorAgent"]
orchestrator = MultiAgentOrchestrator()
assistants = await initialize_assistants(assistant_names, orchestrator)
@@ -183,6 +186,8 @@ async def main():
await task_manager.schedule_task(multi_task)
await orchestrator.wait_for_all_tasks()

assistant_config_manager.save_configs()

await conversation_thread_client.close()

if __name__ == "__main__":
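Both samples now persist assistant configurations at the end of the session through AssistantConfigManager. The pattern in isolation; the 'config' folder argument is taken from the samples, and the commented single-assistant variant mirrors the PetTravelPlan change below:

from azure.ai.assistant.management.assistant_config_manager import AssistantConfigManager

# Obtain the shared manager backed by the local 'config' folder.
assistant_config_manager = AssistantConfigManager.get_instance('config')

# ... create assistant clients and run the session ...

# Persist every assistant's configuration at shutdown,
# or a single one by name: assistant_config_manager.save_config(assistant_client.name)
assistant_config_manager.save_configs()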
19 changes: 16 additions & 3 deletions samples/PetTravelPlanChatAssistant/main.py
@@ -5,6 +5,9 @@
from azure.ai.assistant.management.ai_client_factory import AsyncAIClientType
from azure.ai.assistant.management.async_assistant_client_callbacks import AsyncAssistantClientCallbacks
from azure.ai.assistant.management.async_conversation_thread_client import AsyncConversationThreadClient
from azure.ai.assistant.management.async_message import AsyncConversationMessage
from azure.ai.assistant.management.assistant_config_manager import AssistantConfigManager
from azure.ai.assistant.management.text_message import TextMessage

import os, asyncio, yaml
import azure.identity.aio
@@ -18,10 +21,16 @@ def __init__(self, message_queue):
async def handle_message(self, action, message=""):
await self.message_queue.put((action, message))

async def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message=False, message=None):
async def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message=False, message : AsyncConversationMessage = None):
if run_status == "streaming":
await self.handle_message("start" if is_first_message else "message", message)

await self.handle_message("start" if is_first_message else "message", message.text_message.content)
elif run_status == "completed":
if message:
text_message : TextMessage = message.text_message
if text_message.file_citations:
for file_citation in text_message.file_citations:
print(f"\nFile citation, file_id: {file_citation.file_id}, file_name: {file_citation.file_name}")

async def on_run_end(self, assistant_name, run_identifier, run_end_time, thread_name, response=None):
pass

@@ -94,6 +103,9 @@ async def main():
message_queue = asyncio.Queue()
callbacks = MyAssistantClientCallbacks(message_queue)

# Use the AssistantConfigManager to save the assistant configuration locally at the end of the session
assistant_config_manager = AssistantConfigManager.get_instance('config')

# Create an instance of the AsyncChatAssistantClient
assistant_client = await AsyncChatAssistantClient.from_yaml(yaml.dump(config), callbacks=callbacks, **client_args)
ai_client_type = AsyncAIClientType[assistant_client.assistant_config.ai_client_type]
@@ -128,6 +140,7 @@ async def main():
await message_queue.join()
display_task.cancel()
await conversation_thread_client.close()
assistant_config_manager.save_config(assistant_client.name)

if __name__ == "__main__":
asyncio.run(main())
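On completed runs the PetTravelPlan sample now walks the file-search citations attached to the final text message. The same loop extracted into a small helper; print_file_citations is an illustrative name, and the attribute access follows the hunk above:

from azure.ai.assistant.management.async_message import AsyncConversationMessage
from azure.ai.assistant.management.text_message import TextMessage


def print_file_citations(message: AsyncConversationMessage) -> None:
    # Citations only appear on completed messages that used file search.
    if not message or not message.text_message:
        return
    text_message: TextMessage = message.text_message
    if text_message.file_citations:
        for file_citation in text_message.file_citations:
            print(f"File citation, file_id: {file_citation.file_id}, "
                  f"file_name: {file_citation.file_name}")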
2 changes: 1 addition & 1 deletion sdk/azure-ai-assistant/azure/ai/assistant/_version.py
@@ -6,4 +6,4 @@
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------

VERSION = "0.3.7a1"
VERSION = "0.4.0a1"
Additional changed file
@@ -74,7 +74,7 @@ def get_client(
:return: The AI client.
:rtype: Union[OpenAI, AzureOpenAI, AsyncOpenAI, AsyncAzureOpenAI]
"""
api_version = os.getenv("AZURE_OPENAI_API_VERSION", api_version) or "2024-02-15-preview"
api_version = os.getenv("AZURE_OPENAI_API_VERSION", api_version) or "2024-05-01-preview"
client_key = (client_type, api_version)
if client_key in self._clients:
return self._clients[client_key]
Additional changed file
@@ -1,6 +1,8 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root for full license information.

from azure.ai.assistant.management.message import ConversationMessage


class AssistantClientCallbacks:
def on_run_start(self, assistant_name, run_identifier, run_start_time, user_input):
@@ -20,7 +22,7 @@ def on_run_start(self, assistant_name, run_identifier, run_start_time, user_inpu
"""
pass

def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message=False, message=None):
def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message=False, message : ConversationMessage = None):
"""Callback for when a run updates.
:param assistant_name: The name of the assistant.
@@ -33,8 +35,8 @@ def on_run_update(self, assistant_name, run_identifier, run_status, thread_name,
:type thread_name: str
:param is_first_message: Whether the message is the first message, defaults to False, used when status is "streaming"
:type is_first_message: bool, optional
:param message: The partial message during the run, defaults to None. Used when status is "streaming"
:type message: str, optional
:param message: Can be partial message (streaming text content) or full message with files, citations (completed), defaults to None
:type message: ConversationMessage, optional
:return: None
:rtype: None
Additional changed file
@@ -1,6 +1,8 @@
# Copyright (c) Microsoft. All rights reserved.
# Licensed under the MIT license. See LICENSE.md file in the project root for full license information.

from azure.ai.assistant.management.async_message import AsyncConversationMessage


class AsyncAssistantClientCallbacks:
async def on_run_start(self, assistant_name, run_identifier, run_start_time, user_input):
@@ -20,7 +22,7 @@ async def on_run_start(self, assistant_name, run_identifier, run_start_time, use
"""
pass

async def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message=False, message=None):
async def on_run_update(self, assistant_name, run_identifier, run_status, thread_name, is_first_message=False, message : AsyncConversationMessage = None):
"""Callback for when a run updates.
:param assistant_name: The name of the assistant.
@@ -33,8 +35,8 @@ async def on_run_update(self, assistant_name, run_identifier, run_status, thread
:type thread_name: str
:param is_first_message: Whether the message is the first message, defaults to False, used when status is "streaming"
:type is_first_message: bool, optional
:param message: The partial message during the run, defaults to None. Used when status is "streaming"
:type message: str, optional
:param message: Can be partial message (streaming with text content) or full text message with files, citations (completed), defaults to None
:type message: AsyncConversationMessage, optional
:return: None
:rtype: None
Additional changed file
@@ -3,6 +3,8 @@

from azure.ai.assistant.management.assistant_config import AssistantConfig
from azure.ai.assistant.management.async_assistant_client_callbacks import AsyncAssistantClientCallbacks
from azure.ai.assistant.management.async_message import AsyncConversationMessage
from azure.ai.assistant.management.text_message import TextMessage
from azure.ai.assistant.management.base_chat_assistant_client import BaseChatAssistantClient
from azure.ai.assistant.management.exceptions import EngineError, InvalidJSONError
from azure.ai.assistant.management.logger_module import logger
@@ -200,11 +202,11 @@ async def process_messages(
if thread_name:
max_text_messages = self._assistant_config.text_completion_config.max_text_messages if self._assistant_config.text_completion_config else None
conversation = await self._conversation_thread_client.retrieve_conversation(thread_name=thread_name, max_text_messages=max_text_messages)
for message in reversed(conversation.text_messages):
for message in reversed(conversation.messages):
if message.role == "user":
self._messages.append({"role": "user", "content": message.content})
self._messages.append({"role": "user", "content": message.text_message.content})
if message.role == "assistant":
self._messages.append({"role": "assistant", "content": message.content})
self._messages.append({"role": "assistant", "content": message.text_message.content})
elif user_request:
self._messages.append({"role": "user", "content": user_request})

@@ -327,7 +329,9 @@ async def _process_response_chunks(self, response, thread_name, run_id):
async for chunk in response:
delta = chunk.choices[0].delta if chunk.choices else None
if delta and delta.content:
await self._callbacks.on_run_update(self._name, run_id, "streaming", thread_name, is_first_message, delta.content)
message : AsyncConversationMessage = await AsyncConversationMessage.create(self.ai_client, None)
message.text_message = TextMessage(delta.content)
await self._callbacks.on_run_update(self._name, run_id, "streaming", thread_name, is_first_message, message)
collected_messages.append(delta.content)
is_first_message = False
if delta and delta.tool_calls: