|
26 | 26 | ResponseOutputMessage, |
27 | 27 | ResponseOutputText, |
28 | 28 | ResponseUsage, |
| 29 | + ParsedResponse, |
| 30 | + ) |
| 31 | + from openai.types.responses.parsed_response import ( |
| 32 | + ParsedResponseOutputMessage, |
| 33 | + ParsedResponseOutputText, |
29 | 34 | ) |
30 | 35 |
|
31 | 36 | from posthog.ai.openai import OpenAI |
@@ -115,6 +120,59 @@ def mock_openai_response_with_responses_api(): |
115 | 120 | ) |
116 | 121 |
|
117 | 122 |
|
| 123 | +@pytest.fixture
| 124 | +def mock_parsed_response():
| 125 | +    # Canned ParsedResponse mimicking a structured-output `responses.parse`
| 126 | +    # result: one assistant message whose text is the JSON-serialized event
| 127 | +    # and whose `parsed` field holds the already-deserialized dict.
| 128 | +    return ParsedResponse(
| 129 | +        id="test",
| 130 | +        model="gpt-4o-2024-08-06",
| 131 | +        object="response",
| 132 | +        created_at=1741476542,
| 133 | +        status="completed",
| 134 | +        error=None,
| 135 | +        incomplete_details=None,
| 136 | +        instructions=None,
| 137 | +        max_output_tokens=None,
| 138 | +        tools=[],
| 139 | +        tool_choice="auto",
| 140 | +        output=[
| 141 | +            ParsedResponseOutputMessage(
| 142 | +                id="msg_123",
| 143 | +                type="message",
| 144 | +                role="assistant",
| 145 | +                status="completed",
| 146 | +                content=[
| 147 | +                    ParsedResponseOutputText(
| 148 | +                        type="output_text",
| 149 | +                        # Raw model text (JSON string) alongside its parsed form.
| 150 | +                        text='{"name": "Science Fair", "date": "Friday", "participants": ["Alice", "Bob"]}',
| 151 | +                        annotations=[],
| 152 | +                        parsed={
| 153 | +                            "name": "Science Fair",
| 154 | +                            "date": "Friday",
| 155 | +                            "participants": ["Alice", "Bob"],
| 156 | +                        },
| 157 | +                    )
| 158 | +                ],
| 159 | +            )
| 160 | +        ],
| 161 | +        # Top-level convenience field mirroring the parsed message content.
| 162 | +        output_parsed={
| 163 | +            "name": "Science Fair",
| 164 | +            "date": "Friday",
| 165 | +            "participants": ["Alice", "Bob"],
| 166 | +        },
| 167 | +        parallel_tool_calls=True,
| 168 | +        previous_response_id=None,
| 169 | +        # Usage numbers are asserted against $ai_*_tokens in the test below:
| 170 | +        # 15 input, 20 output, 5 of which are reasoning tokens.
| 171 | +        usage=ResponseUsage(
| 172 | +            input_tokens=15,
| 173 | +            output_tokens=20,
| 174 | +            input_tokens_details={"prompt_tokens": 15, "cached_tokens": 0},
| 175 | +            output_tokens_details={"reasoning_tokens": 5},
| 176 | +            total_tokens=35,
| 177 | +        ),
| 178 | +        user=None,
| 179 | +        metadata={},
| 180 | +    )
| 174 | + |
| 175 | + |
118 | 176 | @pytest.fixture |
119 | 177 | def mock_embedding_response(): |
120 | 178 | return CreateEmbeddingResponse( |
@@ -646,3 +704,73 @@ def test_responses_api(mock_client, mock_openai_response_with_responses_api): |
646 | 704 | assert props["$ai_http_status"] == 200 |
647 | 705 | assert props["foo"] == "bar" |
648 | 706 | assert isinstance(props["$ai_latency"], float) |
| 707 | + |
| 708 | + |
| 709 | +def test_responses_parse(mock_client, mock_parsed_response):
| 710 | +    # Verify that a structured-output `responses.parse` call is captured as a
| 711 | +    # single $ai_generation event with the expected analytics properties.
| 712 | +    # The underlying OpenAI SDK method is patched so no network call happens.
| 713 | +    with patch(
| 714 | +        "openai.resources.responses.Responses.parse",
| 715 | +        return_value=mock_parsed_response,
| 716 | +    ):
| 717 | +        client = OpenAI(api_key="test-key", posthog_client=mock_client)
| 718 | +        response = client.responses.parse(
| 719 | +            model="gpt-4o-2024-08-06",
| 720 | +            input=[
| 721 | +                {"role": "system", "content": "Extract the event information."},
| 722 | +                {
| 723 | +                    "role": "user",
| 724 | +                    "content": "Alice and Bob are going to a science fair on Friday.",
| 725 | +                },
| 726 | +            ],
| 727 | +            # JSON-schema structured output definition for the "event" object.
| 728 | +            text={
| 729 | +                "format": {
| 730 | +                    "type": "json_schema",
| 731 | +                    "json_schema": {
| 732 | +                        "name": "event",
| 733 | +                        "schema": {
| 734 | +                            "type": "object",
| 735 | +                            "properties": {
| 736 | +                                "name": {"type": "string"},
| 737 | +                                "date": {"type": "string"},
| 738 | +                                "participants": {
| 739 | +                                    "type": "array",
| 740 | +                                    "items": {"type": "string"},
| 741 | +                                },
| 742 | +                            },
| 743 | +                            "required": ["name", "date", "participants"],
| 744 | +                        },
| 745 | +                    },
| 746 | +                }
| 747 | +            },
| 748 | +            # PostHog-specific kwargs consumed by the wrapper, not the SDK.
| 749 | +            posthog_distinct_id="test-id",
| 750 | +            posthog_properties={"foo": "bar"},
| 751 | +        )
| 752 | +
| 753 | +        # The wrapper must return the SDK response unchanged.
| 754 | +        assert response == mock_parsed_response
| 755 | +        # Exactly one analytics event should have been captured.
| 756 | +        assert mock_client.capture.call_count == 1
| 757 | +
| 758 | +        call_args = mock_client.capture.call_args[1]
| 759 | +        props = call_args["properties"]
| 760 | +
| 761 | +        assert call_args["distinct_id"] == "test-id"
| 762 | +        assert call_args["event"] == "$ai_generation"
| 763 | +        assert props["$ai_provider"] == "openai"
| 764 | +        assert props["$ai_model"] == "gpt-4o-2024-08-06"
| 765 | +        # Input messages are recorded verbatim.
| 766 | +        assert props["$ai_input"] == [
| 767 | +            {"role": "system", "content": "Extract the event information."},
| 768 | +            {
| 769 | +                "role": "user",
| 770 | +                "content": "Alice and Bob are going to a science fair on Friday.",
| 771 | +            },
| 772 | +        ]
| 773 | +        # Output choices carry the raw JSON text, not the parsed dict.
| 774 | +        assert props["$ai_output_choices"] == [
| 775 | +            {
| 776 | +                "role": "assistant",
| 777 | +                "content": '{"name": "Science Fair", "date": "Friday", "participants": ["Alice", "Bob"]}',
| 778 | +            }
| 779 | +        ]
| 780 | +        # Token counts come from the fixture's ResponseUsage (15/20, 5 reasoning).
| 781 | +        assert props["$ai_input_tokens"] == 15
| 782 | +        assert props["$ai_output_tokens"] == 20
| 783 | +        assert props["$ai_reasoning_tokens"] == 5
| 784 | +        assert props["$ai_http_status"] == 200
| 785 | +        # Custom posthog_properties are merged into the event properties.
| 786 | +        assert props["foo"] == "bar"
| 787 | +        assert isinstance(props["$ai_latency"], float)
0 commit comments