diff --git a/ecologits/tracers/openai_tracer.py b/ecologits/tracers/openai_tracer.py
index b92768d..1cf21a0 100644
--- a/ecologits/tracers/openai_tracer.py
+++ b/ecologits/tracers/openai_tracer.py
@@ -76,7 +76,7 @@ def openai_chat_wrapper_stream(
     token_count = 0
     for i, chunk in enumerate(stream):
         # azure openai has an empty first chunk so we skip it
-        if i==0 and chunk.model=="":
+        if i == 0 and chunk.model == "":
             continue
         if i > 0 and chunk.choices[0].finish_reason is None:
            token_count += 1
diff --git a/tests/test_openai.py b/tests/test_openai.py
index a38da61..a055b13 100644
--- a/tests/test_openai.py
+++ b/tests/test_openai.py
@@ -2,6 +2,7 @@
 import pytest
 from openai import OpenAI, AsyncOpenAI, AzureOpenAI, AsyncAzureOpenAI
 
+
 @pytest.mark.vcr
 def test_openai_chat(tracer_init):
     client = OpenAI()
@@ -60,7 +61,8 @@ def test_azure_openai_chat(tracer_init):
     )
     assert len(response.choices) > 0
     assert response.impacts.energy.value > 0
-    
+
+
 @pytest.mark.vcr
 @pytest.mark.asyncio
 async def test_azure_openai_async_chat(tracer_init):
@@ -72,6 +74,7 @@ async def test_azure_openai_async_chat(tracer_init):
     assert len(response.choices) > 0
     assert response.impacts.energy.value > 0
 
+
 @pytest.mark.vcr
 def test_azure_openai_stream_chat(tracer_init):
     client = AzureOpenAI(azure_endpoint=os.getenv("AZURE_OPENAI_ENDPOINT"),
@@ -83,4 +86,3 @@ def test_azure_openai_stream_chat(tracer_init):
     )
     for chunk in stream:
         assert chunk.impacts.energy.value >= 0
-