Skip to content

Commit

Permalink
Fix session initialization and string formatting in Assistants API integration
Browse files Browse the repository at this point in the history

Co-Authored-By: Alex Reibman <[email protected]>
  • Loading branch information
devin-ai-integration[bot] and areibman committed Dec 11, 2024
1 parent dc97a4a commit 8bed65f
Show file tree
Hide file tree
Showing 3 changed files with 75 additions and 69 deletions.
40 changes: 10 additions & 30 deletions agentops/integrations/openai_assistants.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,6 +12,7 @@
from ..session import Session
from ..enums import EventType


class AssistantAgent:
"""Wrapper for OpenAI Assistant that maps to AgentOps Agent."""

Expand Down Expand Up @@ -46,11 +47,7 @@ def add_message(self, thread_id: str, content: str) -> str:
thread_id: The thread to add the message to
content: The message content
"""
message = self._client.beta.threads.messages.create(
thread_id=thread_id,
role="user",
content=content
)
message = self._client.beta.threads.messages.create(thread_id=thread_id, role="user", content=content)
return message.id

def run(self, thread_id: str, instructions: Optional[str] = None) -> Dict[str, Any]:
Expand All @@ -63,47 +60,30 @@ def run(self, thread_id: str, instructions: Optional[str] = None) -> Dict[str, A
"""
# Create run
run = self._client.beta.threads.runs.create(
thread_id=thread_id,
assistant_id=self.assistant_id,
instructions=instructions
thread_id=thread_id, assistant_id=self.assistant_id, instructions=instructions
)

# Record initial run event
self._record_run_event(run)

# Poll for completion
while run.status in ['queued', 'in_progress']:
run = self._client.beta.threads.runs.retrieve(
thread_id=thread_id,
run_id=run.id
)
while run.status in ["queued", "in_progress"]:
run = self._client.beta.threads.runs.retrieve(thread_id=thread_id, run_id=run.id)
self._record_run_event(run)

# Get messages after completion
messages = self._client.beta.threads.messages.list(
thread_id=thread_id
)
messages = self._client.beta.threads.messages.list(thread_id=thread_id)

return {
'run': run,
'messages': messages
}
return {"run": run, "messages": messages}

def _record_run_event(self, run: Any) -> None:
"""Record run as LLMEvent and tool events if applicable."""
# Record main LLM interaction
event = LLMEvent(
thread_id=UUID(run.thread_id),
model=run.model,
completion=str(run.status)
)
event = LLMEvent(thread_id=UUID(run.thread_id), model=run.model, completion=str(run.status))
self.session.record(event)

# Record tool usage if any
if run.required_action and run.required_action.type == 'submit_tool_outputs':
if run.required_action and run.required_action.type == "submit_tool_outputs":
for tool_call in run.required_action.submit_tool_outputs.tool_calls:
tool_event = ToolEvent(
name=tool_call.function.name,
logs={'arguments': tool_call.function.arguments}
)
tool_event = ToolEvent(name=tool_call.function.name, logs={"arguments": tool_call.function.arguments})
self.session.record(tool_event)
11 changes: 5 additions & 6 deletions examples/openai_examples/assistant_example.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,34 +3,33 @@
from agentops import Session
from agentops.integrations.openai_assistants import AssistantAgent


def main():
# Create session
session = Session()

# Initialize assistant
agent = AssistantAgent(
assistant_id="YOUR_ASSISTANT_ID", # Replace with actual assistant ID
session=session
session=session,
)

# Create thread
thread_id = agent.create_thread()

# Add message
agent.add_message(
thread_id=thread_id,
content="What is 2+2? Please use the code interpreter."
)
agent.add_message(thread_id=thread_id, content="What is 2+2? Please use the code interpreter.")

# Run assistant
result = agent.run(thread_id)

# Print messages
for msg in result['messages']:
for msg in result["messages"]:
print(f"{msg.role}: {msg.content[0].text.value}")

# End session
session.end_session()


if __name__ == "__main__":
main()
93 changes: 60 additions & 33 deletions tests/test_assistant.py
Original file line number Diff line number Diff line change
Expand Up @@ -6,59 +6,86 @@
from agentops.integrations.openai_assistants import AssistantAgent
from agentops.session import Session
from agentops.event import LLMEvent, ToolEvent
from agentops.config import Configuration


@pytest.fixture
def mock_openai():
with patch('agentops.integrations.openai_assistants.OpenAI') as mock:
# Mock assistant
mock.return_value.beta.assistants.retrieve.return_value = Mock(
id='test_assistant',
model='gpt-4',
name='Test Assistant'
)
with patch("agentops.integrations.openai_assistants.OpenAI") as mock:
# Mock client for configuration
mock_client = Mock()
mock_client.add_pre_init_warning = Mock()

# Mock assistant with proper name attribute
assistant_mock = Mock()
assistant_mock.id = "test_assistant"
assistant_mock.model = "gpt-4"
assistant_mock.name = "Test Assistant"
mock.return_value.beta.assistants.retrieve.return_value = assistant_mock

# Mock thread
mock.return_value.beta.threads.create.return_value = Mock(
id='test_thread'
)
mock.return_value.beta.threads.create.return_value = Mock(id="test_thread")

# Mock message
mock.return_value.beta.threads.messages.create.return_value = Mock(
id='test_message'
)
mock.return_value.beta.threads.messages.create.return_value = Mock(id="test_message")

# Mock run
mock.return_value.beta.threads.runs.create.return_value = Mock(
id='test_run',
thread_id='test_thread',
status='completed',
model='gpt-4'
)
# Mock run with valid UUID thread_id
run_mock = Mock()
run_mock.id = "test_run"
run_mock.thread_id = "12345678-1234-5678-1234-567812345678"
run_mock.status = "completed"
run_mock.model = "gpt-4"
mock.return_value.beta.threads.runs.create.return_value = run_mock
mock.return_value.beta.threads.runs.retrieve.return_value = run_mock

# Mock HttpClient for session creation
with patch("agentops.session.HttpClient") as http_mock:
response_mock = Mock()
response_mock.code = 200
response_mock.body = {"jwt": "test-jwt"}
http_mock.post.return_value = response_mock
yield mock, mock_client

yield mock

def test_assistant_creation(mock_openai):
session = Session()
agent = AssistantAgent('test_assistant', session)
mock_openai_client, mock_client = mock_openai
config = Configuration()
config.configure(mock_client, api_key="12345678-1234-5678-1234-567812345678")
session = Session(session_id=UUID("12345678-1234-5678-1234-567812345678"), config=config)
agent = AssistantAgent("test_assistant", session)

assert agent.assistant_id == "test_assistant"
assert agent.model == "gpt-4"
assert agent.name == "Test Assistant"

assert agent.assistant_id == 'test_assistant'
assert agent.model == 'gpt-4'
assert agent.name == 'Test Assistant'

def test_thread_creation(mock_openai):
session = Session()
agent = AssistantAgent('test_assistant', session)
mock_openai_client, mock_client = mock_openai
config = Configuration()
config.configure(mock_client, api_key="12345678-1234-5678-1234-567812345678")
session = Session(session_id=UUID("12345678-1234-5678-1234-567812345678"), config=config)
agent = AssistantAgent("test_assistant", session)

thread_id = agent.create_thread()
assert thread_id == 'test_thread'
assert session.thread_id == 'test_thread'
assert thread_id == "test_thread"
assert session.thread_id == "test_thread"


def test_run_recording(mock_openai):
session = Session()
agent = AssistantAgent('test_assistant', session)
mock_openai_client, mock_client = mock_openai
config = Configuration()
config.configure(mock_client, api_key="12345678-1234-5678-1234-567812345678")
session = Session(session_id=UUID("12345678-1234-5678-1234-567812345678"), config=config)

# Debug: verify session is running
assert session.is_running, "Session failed to start"

agent = AssistantAgent("test_assistant", session)
thread_id = agent.create_thread()
result = agent.run(thread_id)

# Debug: print event counts
print("Event counts:", session.event_counts)

# Verify LLMEvent was recorded
assert session.event_counts['llms'] > 0
assert session.event_counts["llms"] > 0

0 comments on commit 8bed65f

Please sign in to comment.