4
4
OpenAI's chat models.
5
5
"""
6
6
7
+ from __future__ import annotations
8
+
7
9
import json
8
10
import warnings
9
11
from typing import TYPE_CHECKING , Any , Literal , Optional , Union , cast
10
12
11
13
if TYPE_CHECKING :
12
14
from openai .types .chat import ChatCompletion , ChatCompletionMessage , ChatCompletionMessageParam
15
+ from openai .types .responses import Response
16
+
13
17
14
18
# Define message prefixes
15
19
_SYSTEM_PREFIX = "System: "
@@ -208,7 +212,7 @@ def _find_index_after_first_system_block(messages: list[dict[str, Any]]) -> int:
208
212
209
213
210
214
def _form_prompt_responses_api (
211
- messages : list [dict [str , Any ]],
215
+ messages : list [dict [str , Any ]] | str ,
212
216
tools : Optional [list [dict [str , Any ]]] = None ,
213
217
** responses_api_kwargs : Any ,
214
218
) -> str :
@@ -226,7 +230,9 @@ def _form_prompt_responses_api(
226
230
Returns:
227
231
str: A formatted string representing the chat history as a single prompt.
228
232
"""
229
- messages = messages .copy ()
233
+
234
+ messages = [{"role" : "user" , "content" : messages }] if isinstance (messages , str ) else messages .copy ()
235
+
230
236
output = ""
231
237
232
238
# Find the index after the first consecutive block of system messages
@@ -301,7 +307,7 @@ def _form_prompt_responses_api(
301
307
302
308
303
309
def _form_prompt_chat_completions_api (
304
- messages : list [" ChatCompletionMessageParam" ],
310
+ messages : list [ChatCompletionMessageParam ],
305
311
tools : Optional [list [dict [str , Any ]]] = None ,
306
312
) -> str :
307
313
"""
@@ -443,7 +449,7 @@ def form_prompt_string(
443
449
)
444
450
445
451
446
- def form_response_string_chat_completions (response : " ChatCompletion" ) -> str :
452
+ def form_response_string_chat_completions (response : ChatCompletion ) -> str :
447
453
"""Form a single string representing the response, out of the raw response object returned by OpenAI's Chat Completions API.
448
454
449
455
This function extracts the assistant's response message from a ChatCompletion object
@@ -468,7 +474,9 @@ def form_response_string_chat_completions(response: "ChatCompletion") -> str:
468
474
return form_response_string_chat_completions_api (response_msg )
469
475
470
476
471
- def form_response_string_chat_completions_api (response : Union [dict [str , Any ], "ChatCompletionMessage" ]) -> str :
477
+ def form_response_string_chat_completions_api (
478
+ response : Union [dict [str , Any ], ChatCompletionMessage ],
479
+ ) -> str :
472
480
"""
473
481
Form a single string representing the response, out of an assistant response message dictionary in Chat Completions API format.
474
482
@@ -517,6 +525,60 @@ def form_response_string_chat_completions_api(response: Union[dict[str, Any], "C
517
525
return str (content )
518
526
519
527
528
def form_response_string_responses_api(response: Response) -> str:
    """Format a Response object from the OpenAI Responses API into a single string.

    Walks ``response.output`` and concatenates the text of every "message"
    output together with each "function_call" output rendered as an XML-tagged
    JSON blob, consistent with the tool-call format used in
    `form_prompt_string`.

    Args:
        response (Response): A Response object from the OpenAI Responses API
            containing output elements with message content and/or function calls.

    Returns:
        str: A formatted string containing the response content and any tool
            calls. Tool calls are formatted as XML tags containing JSON with
            the function name, arguments, and call id.

    Raises:
        ImportError: If openai is not installed.
    """
    try:
        from openai.types.responses.response_output_text import ResponseOutputText
    except ImportError as e:
        raise ImportError("OpenAI is a required dependency. Please install it with `pip install openai`.") from e

    parts: list[str] = []

    for item in response.output:
        if item.type == "message":
            # Keep only the plain-text segments of the message content.
            texts = [segment.text for segment in item.content if isinstance(segment, ResponseOutputText)]
            parts.append("\n".join(texts))
        elif item.type == "function_call":
            try:
                # Serialize the call as pretty-printed JSON between the tool-call tags.
                payload = json.dumps(
                    {
                        "name": item.name,
                        "arguments": json.loads(item.arguments) if item.arguments else {},
                        "call_id": item.call_id,
                    },
                    indent=2,
                )
                parts.append(f"{_TOOL_CALL_TAG_START}\n{payload}\n{_TOOL_CALL_TAG_END}")
            except (AttributeError, TypeError, json.JSONDecodeError) as e:
                # Best-effort: a malformed tool call is skipped, not fatal.
                warnings.warn(
                    f"Error formatting tool call in response: {e}. Skipping this tool call.",
                    UserWarning,
                    stacklevel=2,
                )
        else:
            warnings.warn(
                f"Unexpected output type: {item.type}. Skipping this output.",
                UserWarning,
                stacklevel=2,
            )

    return "\n".join(parts)
581
+
520
582
def _response_to_dict (response : Any ) -> dict [str , Any ]:
521
583
# `response` should be a Union[dict[str, Any], ChatCompletionMessage], but last isinstance check wouldn't be reachable
522
584
if isinstance (response , dict ):
0 commit comments