Skip to content

Commit 8832cd4

Browse files
authored
OpenAI responses API improvements (brainlid#391)
* ChatOpenAIResponses: Add usage tracking for non-streaming response
* ChatOpenAIResponses: Process file_search_call content part
1 parent a01bbeb commit 8832cd4

File tree

2 files changed

+112
-2
lines changed

2 files changed

+112
-2
lines changed

lib/chat_models/chat_open_ai_responses.ex

Lines changed: 39 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -877,15 +877,25 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
877877
| {:error, String.t()}
878878

879879
# Completed response whose "output" is a list of content items.
# Splits the items into content parts and tool calls, surfaces token
# usage (when the response carries a "usage" map) as message metadata,
# and builds the final assistant message.
def do_process_response(
      _model,
      %{"status" => "completed", "output" => content_items} = response
    )
    when is_list(content_items) do
  {content_parts, tool_calls} =
    content_items_to_content_parts_and_tool_calls(content_items)

  # Attach usage metadata only when the response actually included it.
  metadata =
    case get_token_usage(response) do
      %TokenUsage{} = usage -> %{usage: usage}
      nil -> %{}
    end

  Message.new!(%{
    role: :assistant,
    status: :complete,
    content: content_parts,
    tool_calls: tool_calls,
    metadata: metadata
  })
end
891901

@@ -1256,6 +1266,33 @@ defmodule LangChain.ChatModels.ChatOpenAIResponses do
12561266
end
12571267
end
12581268

1269+
# Handles a `file_search_call` output item. There is no dedicated
# ContentPart type for file-search results yet, so the call's id,
# queries, and results are preserved in an :unsupported content part
# rather than being dropped.
defp content_item_to_content_part_or_tool_call(%{"type" => "file_search_call"} = item) do
  case ContentPart.new(%{
         type: :unsupported,
         options: %{
           id: item["id"],
           type: "file_search_call",
           queries: item["queries"],
           results: item["results"]
         }
       }) do
    {:ok, %ContentPart{} = content_part} ->
      content_part

    {:error, %Ecto.Changeset{} = changeset} ->
      reason = Utils.changeset_error_to_string(changeset)
      Logger.warning("Failed to process file_search output. Reason: #{reason}")
      # Return a minimal content part to avoid breaking the flow.
      ContentPart.text!("")
  end
end
1295+
12591296
# Catch-all for unknown content item types
12601297
defp content_item_to_content_part_or_tool_call(%{"type" => type} = item) do
12611298
Logger.warning("Unknown content item type: #{type}. Item: #{inspect(item)}")

test/chat_models/chat_open_ai_responses_test.exs

Lines changed: 73 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -641,6 +641,79 @@ defmodule LangChain.ChatModels.ChatOpenAIResponsesTest do
641641
assert length(result.tool_calls) == 1
642642
end
643643

644+
test "handles completed response with usage metadata", %{model: model} do
  # Completed response that carries an OpenAI "usage" map alongside the output.
  response = %{
    "status" => "completed",
    "output" => [
      %{
        "content" => [
          %{
            "annotations" => [],
            "logprobs" => [],
            "text" => "hello",
            "type" => "output_text"
          }
        ],
        "role" => "assistant",
        "status" => "completed",
        "type" => "message"
      }
    ],
    "usage" => %{
      "input_tokens" => 27,
      "input_tokens_details" => %{"cached_tokens" => 0},
      "output_tokens" => 115,
      "output_tokens_details" => %{"reasoning_tokens" => 0},
      "total_tokens" => 142
    }
  }

  message = ChatOpenAIResponses.do_process_response(model, response)

  # The token counts from "usage" must surface as TokenUsage metadata.
  assert %LangChain.Message{} = message
  assert %{usage: %LangChain.TokenUsage{input: 27, output: 115}} = message.metadata
end
676+
677+
test "handles completed response with file_search results", %{model: model} do
  # Completed response whose only output item is a file_search_call with
  # two result hits.
  response = %{
    "status" => "completed",
    "output" => [
      %{
        "id" => "fs_0d1b1549e16f51d20168e6af70c7b8819fae111b23577a202d",
        "queries" => ["What is the meaning of life?"],
        "results" => [
          %{
            "attributes" => %{},
            "file_id" => "file-yTrvU63VL1AgEyFqtUYzfVT3",
            "filename" => "Enreach_Contact.pdf",
            "score" => 0.0059,
            "text" => "text part 1",
            "vector_store_id" => "vs_1"
          },
          %{
            "attributes" => %{},
            "file_id" => "file-yTrvU63VL1AgEyFqtUYzfVT3",
            "filename" => "Enreach_Contact.pdf",
            "score" => 0.0037,
            "text" => "text part 2",
            "vector_store_id" => "vs_2"
          }
        ],
        "status" => "completed",
        "type" => "file_search_call"
      }
    ]
  }

  message = ChatOpenAIResponses.do_process_response(model, response)

  # The call is preserved as a single :unsupported content part carrying
  # the queries and results in its options.
  assert %LangChain.Message{role: :assistant, status: :complete} = message
  assert [content_part] = message.content
  assert content_part.type == :unsupported
  assert %{results: [_, _], queries: [_], type: "file_search_call"} = content_part.options
end
716+
644717
test "handles error responses", %{model: model} do
645718
response = %{"error" => %{"message" => "API key invalid"}}
646719

0 commit comments

Comments (0)