@@ -409,11 +409,8 @@ def test_format_request_system_prompt(model, messages, model_id, system_prompt):
 
 def test_format_request_system_prompt_content(model, messages, model_id):
     """Test _format_request with SystemContentBlock input."""
-    system_prompt_content = [
-        {"text": "You are a helpful assistant."},
-        {"cachePoint": {"type": "default"}}
-    ]
-
+    system_prompt_content = [{"text": "You are a helpful assistant."}, {"cachePoint": {"type": "default"}}]
+
     tru_request = model._format_request(messages, system_prompt_content=system_prompt_content)
     exp_request = {
         "inferenceConfig": {},
@@ -429,18 +426,15 @@ def test_format_request_system_prompt_content_with_cache_prompt_config(model, me
     """Test _format_request with SystemContentBlock and cache_prompt config (backwards compatibility)."""
     system_prompt_content = [{"text": "You are a helpful assistant."}]
     model.update_config(cache_prompt="default")
-
+
     with pytest.warns(UserWarning, match="cache_prompt is deprecated"):
         tru_request = model._format_request(messages, system_prompt_content=system_prompt_content)
-
+
     exp_request = {
         "inferenceConfig": {},
         "modelId": model_id,
         "messages": messages,
-        "system": [
-            {"text": "You are a helpful assistant."},
-            {"cachePoint": {"type": "default"}}
-        ],
+        "system": [{"text": "You are a helpful assistant."}, {"cachePoint": {"type": "default"}}],
     }
 
     assert tru_request == exp_request
@@ -528,10 +522,10 @@ def test_format_request_tool_choice_tool(model, messages, model_id, tool_spec):
 
 def test_format_request_cache(model, messages, model_id, tool_spec, cache_type):
     model.update_config(cache_prompt=cache_type, cache_tools=cache_type)
-
+
     with pytest.warns(UserWarning, match="cache_prompt is deprecated"):
         tru_request = model._format_request(messages, tool_specs=[tool_spec])
-
+
     exp_request = {
         "inferenceConfig": {},
         "modelId": model_id,
@@ -668,18 +662,15 @@ async def test_stream(bedrock_client, model, messages, tool_spec, model_id, addi
 async def test_stream_with_system_prompt_content(bedrock_client, model, messages, alist):
     """Test stream method with system_prompt_content parameter."""
     bedrock_client.converse_stream.return_value = {"stream": ["e1", "e2"]}
-
-    system_prompt_content = [
-        {"text": "You are a helpful assistant."},
-        {"cachePoint": {"type": "default"}}
-    ]
+
+    system_prompt_content = [{"text": "You are a helpful assistant."}, {"cachePoint": {"type": "default"}}]
 
     response = model.stream(messages, system_prompt_content=system_prompt_content)
     tru_chunks = await alist(response)
     exp_chunks = ["e1", "e2"]
 
     assert tru_chunks == exp_chunks
-
+
     # Verify the request was formatted with system_prompt_content
     expected_request = {
         "inferenceConfig": {},
@@ -694,12 +685,14 @@ async def test_stream_with_system_prompt_content(bedrock_client, model, messages
 async def test_stream_backwards_compatibility_single_text_block(bedrock_client, model, messages, alist):
     """Test that single text block in system_prompt_content works with legacy system_prompt."""
     bedrock_client.converse_stream.return_value = {"stream": ["e1", "e2"]}
-
+
     system_prompt_content = [{"text": "You are a helpful assistant."}]
 
-    response = model.stream(messages, system_prompt="You are a helpful assistant.", system_prompt_content=system_prompt_content)
+    response = model.stream(
+        messages, system_prompt="You are a helpful assistant.", system_prompt_content=system_prompt_content
+    )
     await alist(response)
-
+
     # Verify the request was formatted with system_prompt_content
     expected_request = {
         "inferenceConfig": {},