
Commit

update
jhakulin committed May 18, 2024
1 parent 3dd4f5b commit 14ffbfb
Showing 5 changed files with 38 additions and 17 deletions.
@@ -74,7 +74,7 @@ def get_client(
:return: The AI client.
:rtype: Union[OpenAI, AzureOpenAI, AsyncOpenAI, AsyncAzureOpenAI]
"""
- api_version = os.getenv("AZURE_OPENAI_API_VERSION", api_version) or "2024-04-01-preview"
+ api_version = os.getenv("AZURE_OPENAI_API_VERSION", api_version) or "2024-05-01-preview"
client_key = (client_type, api_version)
if client_key in self._clients:
return self._clients[client_key]
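Note on the changed default: the resolved API version follows the precedence environment variable, then the caller-supplied argument, then the built-in default. A minimal standalone sketch of that precedence (the resolve_api_version helper is illustrative; only AZURE_OPENAI_API_VERSION and the default string come from the diff):

import os
from typing import Optional

def resolve_api_version(api_version: Optional[str] = None) -> str:
    # Env var wins if set; otherwise the caller's value; otherwise the new default.
    return os.getenv("AZURE_OPENAI_API_VERSION", api_version) or "2024-05-01-preview"

# With AZURE_OPENAI_API_VERSION unset:
resolve_api_version()                      # -> "2024-05-01-preview"
resolve_api_version("2024-02-15-preview")  # -> "2024-02-15-preview"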
@@ -3,6 +3,8 @@

from azure.ai.assistant.management.assistant_config import AssistantConfig
from azure.ai.assistant.management.async_assistant_client_callbacks import AsyncAssistantClientCallbacks
+ from azure.ai.assistant.management.async_message import AsyncConversationMessage
+ from azure.ai.assistant.management.text_message import TextMessage
from azure.ai.assistant.management.base_chat_assistant_client import BaseChatAssistantClient
from azure.ai.assistant.management.exceptions import EngineError, InvalidJSONError
from azure.ai.assistant.management.logger_module import logger
@@ -200,11 +202,11 @@ async def process_messages(
if thread_name:
max_text_messages = self._assistant_config.text_completion_config.max_text_messages if self._assistant_config.text_completion_config else None
conversation = await self._conversation_thread_client.retrieve_conversation(thread_name=thread_name, max_text_messages=max_text_messages)
- for message in reversed(conversation.text_messages):
+ for message in reversed(conversation.messages):
if message.role == "user":
self._messages.append({"role": "user", "content": message.content})
self._messages.append({"role": "user", "content": message.text_message.content})
if message.role == "assistant":
self._messages.append({"role": "assistant", "content": message.content})
self._messages.append({"role": "assistant", "content": message.text_message.content})
elif user_request:
self._messages.append({"role": "user", "content": user_request})

@@ -327,7 +329,9 @@ async def _process_response_chunks(self, response, thread_name, run_id):
async for chunk in response:
delta = chunk.choices[0].delta if chunk.choices else None
if delta and delta.content:
- await self._callbacks.on_run_update(self._name, run_id, "streaming", thread_name, is_first_message, delta.content)
+ message : AsyncConversationMessage = await AsyncConversationMessage.create(self.ai_client, None)
+ message.text_message = TextMessage(delta.content)
+ await self._callbacks.on_run_update(self._name, run_id, "streaming", thread_name, is_first_message, message)
collected_messages.append(delta.content)
is_first_message = False
if delta and delta.tool_calls:
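With this hunk, streaming updates hand the callback an AsyncConversationMessage instead of the raw chunk string. A minimal consumer sketch, assuming the positional argument order used in the call above (parameter names are inferred, and PrintingCallbacks is illustrative):

from azure.ai.assistant.management.async_assistant_client_callbacks import AsyncAssistantClientCallbacks

class PrintingCallbacks(AsyncAssistantClientCallbacks):
    async def on_run_update(self, assistant_name, run_id, run_status, thread_name,
                            is_first_message=False, message=None):
        if run_status == "streaming" and message and message.text_message:
            # Streamed text is now carried inside the message wrapper.
            print(message.text_message.content, end="", flush=True)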
@@ -32,14 +32,18 @@ def __init__(self):
self._assistant_config_manager = None

@classmethod
- async def create(cls, ai_client: Union[AsyncOpenAI, AsyncAzureOpenAI], original_message: Message):
+ async def create(cls,
+     ai_client: Union[AsyncOpenAI, AsyncAzureOpenAI],
+     original_message: Message = None
+ ) -> 'AsyncConversationMessage':
instance = cls()
instance._ai_client = ai_client
instance._original_message = original_message
- instance._role = original_message.role
+ instance._role = original_message.role if original_message else "assistant"
instance._assistant_config_manager = AssistantConfigManager.get_instance()
- instance._sender = instance._get_sender_name(original_message)
- await instance._process_message_contents(original_message)
+ if original_message:
+     instance._sender = instance._get_sender_name(original_message)
+     await instance._process_message_contents(original_message)
return instance

async def _process_message_contents(self, original_message: Message):
@@ -99,6 +103,10 @@ async def _process_text_annotations(self, content_item: TextContentBlock) -> Tup
def text_message(self) -> Optional[TextMessage]:
return self._text_message

+ @text_message.setter
+ def text_message(self, value: TextMessage):
+     self._text_message = value

@property
def file_message(self) -> Optional['AsyncFileMessage']:
return self._file_message
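original_message is now optional and text_message gained a setter, which is what lets the chat clients wrap a streamed chunk in a message object. A usage sketch that mirrors the streaming hunk above (wrap_chunk is an illustrative helper):

from openai import AsyncAzureOpenAI
from azure.ai.assistant.management.async_message import AsyncConversationMessage
from azure.ai.assistant.management.text_message import TextMessage

async def wrap_chunk(ai_client: AsyncAzureOpenAI, chunk_text: str) -> AsyncConversationMessage:
    # With no original message, the role defaults to "assistant" and no contents are processed.
    message = await AsyncConversationMessage.create(ai_client, None)
    message.text_message = TextMessage(chunk_text)
    return message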
@@ -3,6 +3,8 @@

from azure.ai.assistant.management.assistant_config import AssistantConfig
from azure.ai.assistant.management.assistant_client_callbacks import AssistantClientCallbacks
+ from azure.ai.assistant.management.message import ConversationMessage
+ from azure.ai.assistant.management.text_message import TextMessage
from azure.ai.assistant.management.base_chat_assistant_client import BaseChatAssistantClient
from azure.ai.assistant.management.exceptions import EngineError, InvalidJSONError
from azure.ai.assistant.management.logger_module import logger
@@ -183,11 +185,11 @@ def process_messages(
if thread_name:
max_text_messages = self._assistant_config.text_completion_config.max_text_messages if self._assistant_config.text_completion_config else None
conversation = self._conversation_thread_client.retrieve_conversation(thread_name=thread_name, max_text_messages=max_text_messages)
- for message in reversed(conversation.text_messages):
+ for message in reversed(conversation.messages):
if message.role == "user":
self._messages.append({"role": "user", "content": message.content})
self._messages.append({"role": "user", "content": message.text_message.content})
if message.role == "assistant":
self._messages.append({"role": "assistant", "content": message.content})
self._messages.append({"role": "assistant", "content": message.text_message.content})
elif user_request:
self._messages.append({"role": "user", "content": user_request})

@@ -305,7 +307,9 @@ def _process_response_chunks(self, response, thread_name, run_id):
for chunk in response:
delta = chunk.choices[0].delta if chunk.choices else None
if delta and delta.content:
- self._callbacks.on_run_update(self._name, run_id, "streaming", thread_name, is_first_message, delta.content)
+ message : ConversationMessage = ConversationMessage(self.ai_client)
+ message.text_message = TextMessage(delta.content)
+ self._callbacks.on_run_update(self._name, run_id, "streaming", thread_name, is_first_message, message)
collected_messages.append(delta.content)
is_first_message = False
if delta and delta.tool_calls:
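The two hunks above mirror the async client changes: process_messages now reads message.text_message.content from conversation.messages, and the streaming path wraps each chunk in a ConversationMessage before invoking on_run_update.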
13 changes: 9 additions & 4 deletions sdk/azure-ai-assistant/azure/ai/assistant/management/message.py
@@ -23,18 +23,19 @@
class ConversationMessage:
def __init__(self,
ai_client : Union[OpenAI, AzureOpenAI],
- original_message: Message
+ original_message: Message = None
):
self._ai_client = ai_client
self._original_message = original_message
self._text_message = None
self._file_message = None
self._image_message = None
- self._role = original_message.role
+ self._role = original_message.role if original_message else "assistant"
self._sender = None
self._assistant_config_manager = AssistantConfigManager.get_instance()
- self._sender = self._get_sender_name(original_message)
- self._process_message_contents(original_message)
+ if original_message:
+     self._sender = self._get_sender_name(original_message)
+     self._process_message_contents(original_message)

def _process_message_contents(self, original_message: Message):
for content_item in original_message.content:
@@ -92,6 +93,10 @@ def _process_text_annotations(self, content_item: TextContentBlock) -> Tuple[Lis
def text_message(self) -> Optional[TextMessage]:
return self._text_message

+ @text_message.setter
+ def text_message(self, value: TextMessage):
+     self._text_message = value

@property
def file_message(self) -> Optional['FileMessage']:
return self._file_message
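The synchronous ConversationMessage gets the same treatment; with no async factory here, the wrapper is constructed directly. A short hedged sketch mirroring the sync streaming hunk (wrap_chunk is illustrative):

from openai import AzureOpenAI
from azure.ai.assistant.management.message import ConversationMessage
from azure.ai.assistant.management.text_message import TextMessage

def wrap_chunk(ai_client: AzureOpenAI, chunk_text: str) -> ConversationMessage:
    # original_message defaults to None, so the role becomes "assistant" and nothing is parsed.
    message = ConversationMessage(ai_client)
    message.text_message = TextMessage(chunk_text)
    return message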
