Skip to content

Commit

Permalink
draft stream anthropic
Browse files Browse the repository at this point in the history
  • Loading branch information
adrienbanse committed Apr 4, 2024
1 parent 62582c2 commit d14b308
Showing 1 changed file with 27 additions and 9 deletions.
36 changes: 27 additions & 9 deletions genai_impact/tracers/anthropic_tracer.py
Original file line number Diff line number Diff line change
Expand Up @@ -10,20 +10,24 @@
from anthropic import AsyncAnthropic as _AsyncAnthropic
from anthropic.types import Message as _Message
from anthropic.lib.streaming import MessageStreamManager as _MessageStreamManager
from anthropic.types.message_start_event import MessageStartEvent
from anthropic.types.message_delta_event import MessageDeltaEvent
except ImportError:
_Anthropic = object()
_AsyncAnthropic = object()
_Message = object()
_MessageStreamManager = object()


class Message(_Message):
    """Anthropic ``Message`` extended with environmental impact estimates."""

    # Environmental impact estimates computed for this response.
    impacts: Impacts


class MessageStreamManager(_MessageStreamManager):
    """Anthropic ``MessageStreamManager`` extended with impact estimates.

    Rebuilt from an existing SDK stream manager so the wrapped stream can be
    handed back to callers together with the computed impacts.
    """

    # Environmental impact estimates computed for the streamed response.
    impacts: Impacts

    def __init__(self, parent, impacts):
        """Copy the private state of *parent* and attach *impacts*.

        Args:
            parent: The original ``MessageStreamManager`` returned by the SDK.
            impacts: Impact estimates for the streamed response.
        """
        # Because this subclass shares the class name ``MessageStreamManager``
        # with its parent, ``self.__stream`` name-mangles to the very same
        # ``_MessageStreamManager__stream`` attribute we read from ``parent``.
        self.__stream = parent._MessageStreamManager__stream
        self.__api_request = parent._MessageStreamManager__api_request
        self.impacts = impacts

def compute_impacts_and_return_response(response: Any) -> Message:
model = models.find_model(provider="anthropic", model_name=response.model)
Expand Down Expand Up @@ -55,18 +59,32 @@ async def anthropic_async_chat_wrapper(


def compute_impacts_and_return_stream_response(response: Any) -> MessageStreamManager:
    """Consume a streamed Anthropic response, compute impacts, and rewrap it.

    NOTE(review): this iterates the stream to completion before returning, so
    the caller receives an already-consumed stream manager — confirm this is
    the intended behavior of the draft.

    Args:
        response: The ``MessageStreamManager`` returned by the Anthropic SDK.

    Returns:
        A ``MessageStreamManager`` carrying the computed impacts, or the
        original ``response`` unchanged when no ``MessageStartEvent`` is seen
        (unexpected first event, or an empty stream).
    """
    output_tokens = 0
    model = None  # stays None if the stream is empty
    with response as stream:
        for i, event in enumerate(stream):
            if i == 0:
                if isinstance(event, MessageStartEvent):
                    message = event.message
                    model = models.find_model(provider="anthropic", model_name=message.model)
                    output_tokens += message.usage.output_tokens
                else:
                    # Unexpected first event: give up and return the stream untouched.
                    print("Stream is not initialized with MessageStartEvent")
                    return response
            elif isinstance(event, MessageDeltaEvent):
                output_tokens += event.usage.output_tokens
    if model is None:
        # Empty stream: no MessageStartEvent was ever received.
        print("Stream is not initialized with MessageStartEvent")
        return response
    model_size = model.active_parameters or model.active_parameters_range
    impacts = compute_llm_impact(
        model_parameter_count=model_size, output_token_count=output_tokens
    )
    return MessageStreamManager(response, impacts)


def anthropic_stream_chat_wrapper(
    wrapped: Callable, instance: _Anthropic, args: Any, kwargs: Any  # noqa: ARG001
) -> MessageStreamManager:
    """Wrap a streaming chat call and attach impact estimates to its result.

    Args:
        wrapped: The original SDK method being wrapped.
        instance: The Anthropic client instance (unused, kept for wrapt's API).
        args: Positional arguments forwarded to ``wrapped``.
        kwargs: Keyword arguments forwarded to ``wrapped``.

    Returns:
        The stream manager produced by ``wrapped``, rewrapped with impacts.
    """
    response = wrapped(*args, **kwargs)
    return compute_impacts_and_return_stream_response(response)


class AnthropicInstrumentor:
def __init__(self) -> None:
self.wrapped_methods = [
Expand Down

0 comments on commit d14b308

Please sign in to comment.