Skip to content

Commit

Permalink
Merge pull request #14 from dataforgoodfr/feat/anthropic-tracer
Browse files Browse the repository at this point in the history
Implement tracer mode with tests
  • Loading branch information
samuelrince authored Mar 16, 2024
2 parents 9c4fb30 + 90d899c commit 283811f
Show file tree
Hide file tree
Showing 19 changed files with 751 additions and 56 deletions.
5 changes: 0 additions & 5 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,11 +5,6 @@ repos:
hooks:
- id: ruff
args: [--fix, --exit-non-zero-on-fix]
- repo: https://github.com/psf/black
rev: 22.3.0
hooks:
- id: black
language_version: python3
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.3.0
hooks:
Expand Down
9 changes: 9 additions & 0 deletions Makefile
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@

# Install the project with all extras plus the dev and docs dependency groups.
install:
	poetry install --all-extras --with dev,docs

# Run the test suite.
test:
	poetry run pytest

# Run the test suite, recording HTTP cassettes once if they are missing.
test-record:
	poetry run pytest --record-mode=once
5 changes: 4 additions & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,10 @@ Coming soon...
## 🚀 Usage

```python
from genai_impact import OpenAI
from genai_impact import Tracer
from openai import OpenAI

Tracer.init()

client = OpenAI(
api_key="<OPENAI_API_KEY>",
Expand Down
4 changes: 2 additions & 2 deletions genai_impact/__init__.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,3 @@
from genai_impact.client import Anthropic, MistralClient, OpenAI
from .tracer import Tracer

__all__ = ["OpenAI", "MistralClient", "Anthropic"]
__all__ = ["Tracer"]
5 changes: 0 additions & 5 deletions genai_impact/client/__init__.py

This file was deleted.

38 changes: 38 additions & 0 deletions genai_impact/tracer.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,38 @@
import importlib.util


class Tracer:

@staticmethod
def init() -> None:
init_instruments()


def init_instruments() -> None:
init_openai_instrumentor()
init_anthropic_instrumentor()
init_mistralai_instrumentor()


def init_openai_instrumentor() -> None:
if importlib.util.find_spec("openai") is not None:
from genai_impact.tracers.openai_tracer import OpenAIInstrumentor

instrumentor = OpenAIInstrumentor()
instrumentor.instrument()


def init_anthropic_instrumentor() -> None:
if importlib.util.find_spec("anthropic") is not None:
from genai_impact.tracers.anthropic_tracer import AnthropicInstrumentor

instrumentor = AnthropicInstrumentor()
instrumentor.instrument()


def init_mistralai_instrumentor() -> None:
if importlib.util.find_spec("mistralai") is not None:
from genai_impact.tracers.mistralai_tracer import MistralAIInstrumentor

instrumentor = MistralAIInstrumentor()
instrumentor.instrument()
Empty file.
Original file line number Diff line number Diff line change
Expand Up @@ -11,33 +11,48 @@
_Anthropic = object()
_Message = object()


# model names found here: https://docs.anthropic.com/claude/docs/models-overview#model-recommendations
# TODO update model sizes for anthropic
_MODEL_SIZES = {
"claude-3-opus-20240229": 70, # fake data
"claude-3-haiku-20240307": 10,
"claude-3-sonnet-20240229": 10, # fake data
"claude-3-opus-20240229": 440, # fake data
}


class Message(_Message):
    # Anthropic Message response model extended with environmental impacts.
    impacts: Impacts


def chat_wrapper(
wrapped: Callable, instance: _Anthropic, args: Any, kwargs: Any # noqa: ARG001
) -> Message:
response = wrapped(*args, **kwargs)
def _set_impacts(response: Message) -> Impacts:
model_size = _MODEL_SIZES.get(response.model)
output_tokens = response.usage.output_tokens
impacts = compute_llm_impact(
model_parameter_count=model_size, output_token_count=output_tokens
)
return Message(**response.model_dump(), impacts=impacts)
return impacts


class Anthropic(_Anthropic):
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)
def anthropic_chat_wrapper(
    wrapped: Callable, instance: _Anthropic, args: Any, kwargs: Any  # noqa: ARG001
) -> Message:
    """Wrap ``Messages.create`` so the returned message carries impact data."""
    raw_response = wrapped(*args, **kwargs)
    # Rebuild the response as our Message subclass with the impacts attached.
    return Message(**raw_response.model_dump(), impacts=_set_impacts(raw_response))


wrap_function_wrapper("anthropic.resources", "Messages.create", chat_wrapper)
class AnthropicInstrumentor:
    """Patches the Anthropic SDK so chat responses include impact estimates."""

    def __init__(self) -> None:
        # Registry of SDK methods to patch; each entry names the target
        # module, the dotted method path, and the wrapper to install.
        self.wrapped_methods = [
            {
                "module": "anthropic.resources",
                "name": "Messages.create",
                "wrapper": anthropic_chat_wrapper,
            },
        ]

    def instrument(self) -> None:
        """Install every registered wrapper via wrapt."""
        for method in self.wrapped_methods:
            wrap_function_wrapper(method["module"], method["name"], method["wrapper"])
Original file line number Diff line number Diff line change
Expand Up @@ -18,15 +18,15 @@
"mistral-tiny": 7.3,
"mistral-small": 12.9, # mixtral active parameters count
"mistral-medium": 70,
"mistral-large": 220,
"mistral-large": 440,
}


class ChatCompletionResponse(_ChatCompletionResponse):
    # Mistral chat response model extended with environmental impacts.
    impacts: Impacts


def chat_wrapper(
def mistralai_chat_wrapper(
wrapped: Callable, instance: _MistralClient, args: Any, kwargs: Any # noqa: ARG001
) -> ChatCompletionResponse:
response = wrapped(*args, **kwargs)
Expand All @@ -38,8 +38,20 @@ def chat_wrapper(
return ChatCompletionResponse(**response.model_dump(), impacts=impacts)


class MistralClient(_MistralClient):
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)

wrap_function_wrapper("mistralai.client", "MistralClient.chat", chat_wrapper)
class MistralAIInstrumentor:
    """Patches the Mistral AI SDK so chat responses include impact estimates."""

    def __init__(self) -> None:
        # Registry of SDK methods to patch; each entry names the target
        # module, the dotted method path, and the wrapper to install.
        self.wrapped_methods = [
            {
                "module": "mistralai.client",
                "name": "MistralClient.chat",
                "wrapper": mistralai_chat_wrapper,
            },
        ]

    def instrument(self) -> None:
        """Install every registered wrapper via wrapt."""
        for method in self.wrapped_methods:
            wrap_function_wrapper(method["module"], method["name"], method["wrapper"])
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from typing import Any, Callable

from openai import OpenAI as _OpenAI
from openai.resources.chat import Completions
from openai.types.chat import ChatCompletion as _ChatCompletion
from wrapt import wrap_function_wrapper
Expand All @@ -12,27 +11,27 @@
"gpt-4-turbo-preview": None,
"gpt-4-1106-preview": None,
"gpt-4-vision-preview": None,
"gpt-4": 220,
"gpt-4-0314": 220,
"gpt-4-0613": 220,
"gpt-4-32k": 220,
"gpt-4-32k-0314": 220,
"gpt-4-32k-0613": 220,
"gpt-3.5-turbo": 20,
"gpt-3.5-turbo-16k": 20,
"gpt-3.5-turbo-0301": 20,
"gpt-3.5-turbo-0613": 20,
"gpt-3.5-turbo-1106": 20,
"gpt-3.5-turbo-0125": 20,
"gpt-3.5-turbo-16k-0613": 20,
"gpt-4": 440,
"gpt-4-0314": 440,
"gpt-4-0613": 440,
"gpt-4-32k": 440,
"gpt-4-32k-0314": 440,
"gpt-4-32k-0613": 440,
"gpt-3.5-turbo": 70,
"gpt-3.5-turbo-16k": 70,
"gpt-3.5-turbo-0301": 70,
"gpt-3.5-turbo-0613": 70,
"gpt-3.5-turbo-1106": 70,
"gpt-3.5-turbo-0125": 70,
"gpt-3.5-turbo-16k-0613": 70,
}


class ChatCompletion(_ChatCompletion):
    # OpenAI chat completion response model extended with environmental impacts.
    impacts: Impacts


def chat_wrapper(
def openai_chat_wrapper(
wrapped: Callable, instance: Completions, args: Any, kwargs: Any # noqa: ARG001
) -> ChatCompletion:
response = wrapped(*args, **kwargs)
Expand All @@ -44,10 +43,20 @@ def chat_wrapper(
return ChatCompletion(**response.model_dump(), impacts=impacts)


class OpenAI(_OpenAI):
def __init__(self, **kwargs: Any) -> None:
super().__init__(**kwargs)

wrap_function_wrapper(
"openai.resources.chat.completions", "Completions.create", chat_wrapper
)
class OpenAIInstrumentor:
    """Patches the OpenAI SDK so chat responses include impact estimates."""

    def __init__(self) -> None:
        # Registry of SDK methods to patch; each entry names the target
        # module, the dotted method path, and the wrapper to install.
        self.wrapped_methods = [
            {
                "module": "openai.resources.chat.completions",
                "name": "Completions.create",
                "wrapper": openai_chat_wrapper,
            },
        ]

    def instrument(self) -> None:
        """Install every registered wrapper via wrapt."""
        for method in self.wrapped_methods:
            wrap_function_wrapper(method["module"], method["name"], method["wrapper"])
Loading

0 comments on commit 283811f

Please sign in to comment.