diff --git a/gradient_adk/runtime/langgraph/langgraph_instrumentor.py b/gradient_adk/runtime/langgraph/langgraph_instrumentor.py
index 7d84069..8e5acf8 100644
--- a/gradient_adk/runtime/langgraph/langgraph_instrumentor.py
+++ b/gradient_adk/runtime/langgraph/langgraph_instrumentor.py
@@ -12,7 +12,11 @@
 
 from ..interfaces import NodeExecution
 from ..digitalocean_tracker import DigitalOceanTracesTracker
-from ..network_interceptor import get_network_interceptor
+from ..network_interceptor import (
+    get_network_interceptor,
+    is_inference_url,
+    is_kbaas_url,
+)
 
 
 WRAPPED_FLAG = "__do_wrapped__"
@@ -232,17 +236,68 @@ def _had_hits_since(intr, token) -> bool:
     return False
 
 
-def _get_captured_payloads(intr, token) -> tuple:
-    """Get captured API request/response payloads if available (e.g., for LLM calls)."""
+def _get_captured_payloads_with_type(intr, token) -> tuple:
+    """Get captured API request/response payloads and classify the call type.
+
+    Returns:
+        (request_payload, response_payload, is_llm, is_retriever)
+    """
     try:
         captured = intr.get_captured_requests_since(token)
         if captured:
             # Use the first captured request (most common case)
             call = captured[0]
-            return call.request_payload, call.response_payload
+            url = call.url
+            is_llm = is_inference_url(url)
+            is_retriever = is_kbaas_url(url)
+            return call.request_payload, call.response_payload, is_llm, is_retriever
     except Exception:
         pass
-    return None, None
+    return None, None, False, False
+
+
+def _transform_kbaas_response(response: Optional[Dict[str, Any]]) -> Optional[list]:
+    """Transform KBaaS response to standard retriever format.
+
+    Extracts results and maps content fields to 'page_content'.
+
+    For hierarchical KB (parent retrieval):
+    - Uses 'parent_chunk_text' as 'page_content' (the context users typically want)
+    - Preserves 'text_content' as 'embedded_content' for reference
+
+    For standard KB:
+    - Uses 'text_content' as 'page_content'
+
+    Returns a list of dicts as expected for retriever spans.
+    """
+    if not isinstance(response, dict):
+        return response
+
+    results = response.get("results", [])
+    if not isinstance(results, list):
+        return response
+
+    transformed_results = []
+    for item in results:
+        if isinstance(item, dict):
+            new_item = dict(item)
+
+            # For hierarchical KB: prefer parent_chunk_text as page_content
+            if "parent_chunk_text" in new_item:
+                new_item["page_content"] = new_item.pop("parent_chunk_text")
+                # Preserve embedded text as embedded_content for reference
+                if "text_content" in new_item:
+                    new_item["embedded_content"] = new_item.pop("text_content")
+            elif "text_content" in new_item:
+                # Standard KB: use text_content as page_content
+                new_item["page_content"] = new_item.pop("text_content")
+
+            transformed_results.append(new_item)
+        else:
+            transformed_results.append(item)
+
+    # Return just the array of results
+    return transformed_results
 
 
 class LangGraphInstrumentor:
@@ -280,20 +335,33 @@ def _finish_ok(
     # (_wrap_async_func, _wrap_sync_func, etc.) BEFORE calling _finish_ok.
     # The wrappers collect streamed content and pass {"content": "..."} here.
 
-    # Check if this node made any tracked API calls (e.g., LLM inference)
+    # Check if this node made any tracked API calls (e.g., LLM inference or KBaaS retrieval)
     if _had_hits_since(intr, tok):
-        _ensure_meta(rec)["is_llm_call"] = True
+        # Get captured payloads and classify the call type
+        api_request, api_response, is_llm, is_retriever = (
+            _get_captured_payloads_with_type(intr, tok)
+        )
 
-        # Try to get actual API request/response payloads (for LLM calls)
-        api_request, api_response = _get_captured_payloads(intr, tok)
+        # Set metadata based on call type
+        meta = _ensure_meta(rec)
+        if is_llm:
+            meta["is_llm_call"] = True
+        elif is_retriever:
+            meta["is_retriever_call"] = True
+        else:
+            # Fallback: assume LLM call for backward compatibility
+            meta["is_llm_call"] = True
 
         if api_request or api_response:
             # Use actual API payloads instead of function args
             if api_request:
                 rec.inputs = _freeze(api_request)
 
-            # Use actual API response as output (e.g., LLM completion)
+            # Use actual API response as output
             if api_response:
+                # Transform KBaaS response to standard retriever format
+                if is_retriever:
+                    api_response = _transform_kbaas_response(api_response)
                 out_payload = _freeze(api_response)
             else:
                 out_payload = _canonical_output(inputs_snapshot, a, kw, ret)
@@ -306,10 +374,21 @@
 
 def _finish_err(rec: NodeExecution, intr, tok, e: BaseException):
     if _had_hits_since(intr, tok):
-        _ensure_meta(rec)["is_llm_call"] = True
+        # Get captured payloads and classify the call type
+        api_request, _, is_llm, is_retriever = _get_captured_payloads_with_type(
+            intr, tok
+        )
+
+        # Set metadata based on call type
+        meta = _ensure_meta(rec)
+        if is_llm:
+            meta["is_llm_call"] = True
+        elif is_retriever:
+            meta["is_retriever_call"] = True
+        else:
+            # Fallback: assume LLM call for backward compatibility
+            meta["is_llm_call"] = True
 
-        # Try to get actual API request payload even on error
-        api_request, _ = _get_captured_payloads(intr, tok)
 
         if api_request:
             rec.inputs = _freeze(api_request)
@@ -623,4 +702,4 @@ def wrapped_add_node(graph_self, *args, **kwargs):
             return original_add_node(graph_self, *args, **kwargs)
 
         StateGraph.add_node = wrapped_add_node
-        self._installed = True
+        self._installed = True
\ No newline at end of file
diff --git a/gradient_adk/runtime/network_interceptor.py b/gradient_adk/runtime/network_interceptor.py
index 10b09aa..728252c 100644
--- a/gradient_adk/runtime/network_interceptor.py
+++ b/gradient_adk/runtime/network_interceptor.py
@@ -24,9 +24,11 @@ class CapturedRequest:
 
     def __init__(
         self,
+        url: Optional[str] = None,
        request_payload: Optional[Dict[str, Any]] = None,
        response_payload: Optional[Dict[str, Any]] = None,
     ):
+        self.url = url
        self.request_payload = request_payload
        self.response_payload = response_payload
 
@@ -287,9 +289,9 @@ def _record_request(
         with self._lock:
             if self._is_tracked_url(url):
                 self._hit_count += 1
-                # Create a new captured request record
+                # Create a new captured request record with URL
                 self._captured_requests.append(
-                    CapturedRequest(request_payload=request_payload)
+                    CapturedRequest(url=url, request_payload=request_payload)
                 )
 
     def _record_response(
@@ -401,6 +403,25 @@ def hook(url: str, headers: Dict[str, str]) -> Dict[str, str]:
     return hook
 
 
+# URL classification helpers for different DigitalOcean services
+INFERENCE_URL_PATTERNS = ["inference.do-ai.run", "inference.do-ai-test.run"]
+KBAAS_URL_PATTERNS = ["kbaas.do-ai.run", "kbaas.do-ai-test.run"]
+
+
+def is_inference_url(url: Optional[str]) -> bool:
+    """Check if URL matches DigitalOcean inference (LLM) endpoints."""
+    if not url:
+        return False
+    return any(pattern in url for pattern in INFERENCE_URL_PATTERNS)
+
+
+def is_kbaas_url(url: Optional[str]) -> bool:
+    """Check if URL matches DigitalOcean KBaaS (Knowledge Base) endpoints."""
+    if not url:
+        return False
+    return any(pattern in url for pattern in KBAAS_URL_PATTERNS)
+
+
 # Global instance
 _global_interceptor = NetworkInterceptor()
 
@@ -411,14 +432,21 @@ def get_network_interceptor() -> NetworkInterceptor:
 
 
 def setup_digitalocean_interception() -> None:
     intr = get_network_interceptor()
-    intr.add_endpoint_pattern("inference.do-ai.run")
-    intr.add_endpoint_pattern("inference.do-ai-test.run")
-    # Register User-Agent hook for ADK identification
+    # Add inference (LLM) endpoint patterns
+    for pattern in INFERENCE_URL_PATTERNS:
+        intr.add_endpoint_pattern(pattern)
+
+    # Add KBaaS (Knowledge Base) endpoint patterns
+    for pattern in KBAAS_URL_PATTERNS:
+        intr.add_endpoint_pattern(pattern)
+
+    # Register User-Agent hook for ADK identification (all DO endpoints)
+    all_patterns = INFERENCE_URL_PATTERNS + KBAAS_URL_PATTERNS
     ua_hook = create_adk_user_agent_hook(
         version=_get_adk_version(),
-        url_patterns=["inference.do-ai.run", "inference.do-ai-test.run"],
+        url_patterns=all_patterns,
     )
     intr.add_request_hook(ua_hook)
 
-    intr.start_intercepting()
+    intr.start_intercepting()
\ No newline at end of file
diff --git a/tests/runtime/langgraph/langgraph_instrumentor_test.py b/tests/runtime/langgraph/langgraph_instrumentor_test.py
index 827fc73..9af210c 100644
--- a/tests/runtime/langgraph/langgraph_instrumentor_test.py
+++ b/tests/runtime/langgraph/langgraph_instrumentor_test.py
@@ -5,6 +5,8 @@
 from gradient_adk.runtime.langgraph.langgraph_instrumentor import (
     LangGraphInstrumentor,
     WRAPPED_FLAG,
+    _transform_kbaas_response,
+    _get_captured_payloads_with_type,
 )
 
 
@@ -197,3 +199,368 @@ def f(state: dict):
 
     app = _compile_singleton_graph(g, "noop")
     assert app.invoke({}) == {"ok": 1}
+
+
+# -----------------------------
+# KBaaS Response Transformation Tests
+# -----------------------------
+
+
+def test_transform_kbaas_response_converts_text_content_to_page_content():
+    """Test that text_content is converted to page_content in results."""
+    response = {
+        "results": [
+            {
+                "metadata": {"source": "doc1.pdf", "page": 1},
+                "text_content": "This is the document content."
+            },
+            {
+                "metadata": {"source": "doc2.pdf", "page": 2},
+                "text_content": "Another document chunk."
+            }
+        ],
+        "total_results": 2
+    }
+
+    transformed = _transform_kbaas_response(response)
+
+    # Should return a list (array) directly, not a dict
+    assert isinstance(transformed, list)
+    assert len(transformed) == 2
+
+    # Check that text_content was converted to page_content
+    assert "text_content" not in transformed[0]
+    assert "text_content" not in transformed[1]
+    assert transformed[0]["page_content"] == "This is the document content."
+    assert transformed[1]["page_content"] == "Another document chunk."
+
+    # Check that metadata is preserved
+    assert transformed[0]["metadata"]["source"] == "doc1.pdf"
+    assert transformed[1]["metadata"]["source"] == "doc2.pdf"
+
+
+def test_transform_kbaas_response_handles_empty_results():
+    """Test that empty results list is handled correctly."""
+    response = {
+        "results": [],
+        "total_results": 0
+    }
+
+    transformed = _transform_kbaas_response(response)
+
+    # Should return an empty list
+    assert isinstance(transformed, list)
+    assert transformed == []
+
+
+def test_transform_kbaas_response_preserves_items_without_text_content():
+    """Test that items without text_content are preserved unchanged."""
+    response = {
+        "results": [
+            {
+                "metadata": {"source": "doc1.pdf"},
+                "text_content": "Has text content."
+            },
+            {
+                "metadata": {"source": "doc2.pdf"},
+                "page_content": "Already has page_content."
+            },
+            {
+                "metadata": {"source": "doc3.pdf"}
+                # No text_content or page_content
+            }
+        ],
+        "total_results": 3
+    }
+
+    transformed = _transform_kbaas_response(response)
+
+    # Should return a list
+    assert isinstance(transformed, list)
+    assert len(transformed) == 3
+
+    # First item should be converted
+    assert transformed[0]["page_content"] == "Has text content."
+    assert "text_content" not in transformed[0]
+
+    # Second item should be unchanged (already has page_content)
+    assert transformed[1]["page_content"] == "Already has page_content."
+
+    # Third item should be unchanged (no text_content)
+    assert "page_content" not in transformed[2]
+    assert "text_content" not in transformed[2]
+
+
+def test_transform_kbaas_response_handles_none():
+    """Test that None response is handled gracefully."""
+    assert _transform_kbaas_response(None) is None
+
+
+def test_transform_kbaas_response_handles_non_dict():
+    """Test that non-dict responses are returned as-is."""
+    assert _transform_kbaas_response("string response") == "string response"
+    assert _transform_kbaas_response(123) == 123
+    assert _transform_kbaas_response(["list", "response"]) == ["list", "response"]
+
+
+def test_transform_kbaas_response_handles_missing_results_key():
+    """Test that response without results key returns empty list."""
+    response = {"other_key": "value"}
+    transformed = _transform_kbaas_response(response)
+    # When "results" key is missing, get() returns [], so we get empty list
+    assert transformed == []
+
+
+def test_transform_kbaas_response_hierarchical_kb_with_parent_chunk():
+    """Test hierarchical KB: parent_chunk_text becomes page_content, text_content becomes embedded_content."""
+    response = {
+        "results": [
+            {
+                "metadata": {"source": "doc1.pdf", "page": 1},
+                "text_content": "This is the embedded chunk.",
+                "parent_chunk_text": "This is the full parent context with more information."
+            },
+            {
+                "metadata": {"source": "doc2.pdf", "page": 2},
+                "text_content": "Another embedded chunk.",
+                "parent_chunk_text": "Another parent context."
+            }
+        ],
+        "total_results": 2
+    }
+
+    transformed = _transform_kbaas_response(response)
+
+    # Should return a list
+    assert isinstance(transformed, list)
+    assert len(transformed) == 2
+
+    # parent_chunk_text should become page_content
+    assert transformed[0]["page_content"] == "This is the full parent context with more information."
+    assert transformed[1]["page_content"] == "Another parent context."
+
+    # text_content should become embedded_content
+    assert transformed[0]["embedded_content"] == "This is the embedded chunk."
+    assert transformed[1]["embedded_content"] == "Another embedded chunk."
+
+    # Original keys should be removed
+    assert "parent_chunk_text" not in transformed[0]
+    assert "parent_chunk_text" not in transformed[1]
+    assert "text_content" not in transformed[0]
+    assert "text_content" not in transformed[1]
+
+    # Metadata should be preserved
+    assert transformed[0]["metadata"]["source"] == "doc1.pdf"
+
+
+def test_transform_kbaas_response_hierarchical_kb_parent_only():
+    """Test hierarchical KB with parent_chunk_text but no text_content."""
+    response = {
+        "results": [
+            {
+                "metadata": {"source": "doc1.pdf"},
+                "parent_chunk_text": "Parent context only."
+            }
+        ],
+        "total_results": 1
+    }
+
+    transformed = _transform_kbaas_response(response)
+
+    assert isinstance(transformed, list)
+    assert len(transformed) == 1
+    assert transformed[0]["page_content"] == "Parent context only."
+    assert "embedded_content" not in transformed[0]
+    assert "parent_chunk_text" not in transformed[0]
+
+
+def test_transform_kbaas_response_mixed_results():
+    """Test mixed results: some with parent_chunk_text, some with only text_content."""
+    response = {
+        "results": [
+            {
+                "metadata": {"source": "hierarchical.pdf"},
+                "text_content": "Embedded chunk.",
+                "parent_chunk_text": "Full parent context."
+            },
+            {
+                "metadata": {"source": "standard.pdf"},
+                "text_content": "Standard KB chunk."
+            },
+            {
+                "metadata": {"source": "empty.pdf"}
+                # No content fields
+            }
+        ],
+        "total_results": 3
+    }
+
+    transformed = _transform_kbaas_response(response)
+
+    assert isinstance(transformed, list)
+    assert len(transformed) == 3
+
+    # First item: hierarchical (has parent_chunk_text)
+    assert transformed[0]["page_content"] == "Full parent context."
+    assert transformed[0]["embedded_content"] == "Embedded chunk."
+
+    # Second item: standard (only text_content)
+    assert transformed[1]["page_content"] == "Standard KB chunk."
+ assert "embedded_content" not in transformed[1] + + # Third item: no content fields + assert "page_content" not in transformed[2] + assert "embedded_content" not in transformed[2] + + +# ----------------------------- +# Retriever Call Detection Tests +# ----------------------------- + + +def test_retriever_hit_sets_metadata(tracker, interceptor): + """Test that KBaaS calls set is_retriever_call metadata instead of is_llm_call.""" + # Create a mock captured request for KBaaS + mock_captured = MagicMock() + mock_captured.url = "https://kbaas.do-ai.run/v1/retrieve" + mock_captured.request_payload = {"query": "test query"} + mock_captured.response_payload = { + "results": [{"text_content": "doc content", "metadata": {}}], + "total_results": 1 + } + + interceptor.hits_since.return_value = 1 + interceptor.get_captured_requests_since.return_value = [mock_captured] + + inst = LangGraphInstrumentor() + inst.install(tracker) + + def node(state: dict): + return {"r": 1} + + g = make_graph() + g.add_node("retriever", node) + app = _compile_singleton_graph(g, "retriever") + + app.invoke({}) + + # NodeExecution record is arg0 to on_node_end + exec_rec = tracker.on_node_end.call_args[0][0] + assert exec_rec.metadata.get("is_retriever_call") is True + assert exec_rec.metadata.get("is_llm_call") is None or exec_rec.metadata.get("is_llm_call") is False + + +def test_retriever_response_is_transformed(tracker, interceptor): + """Test that KBaaS responses have text_content converted to page_content.""" + # Create a mock captured request for KBaaS + mock_captured = MagicMock() + mock_captured.url = "https://kbaas.do-ai.run/v1/retrieve" + mock_captured.request_payload = {"query": "test query"} + mock_captured.response_payload = { + "results": [ + {"text_content": "Document content here", "metadata": {"source": "test.pdf"}} + ], + "total_results": 1 + } + + interceptor.hits_since.return_value = 1 + interceptor.get_captured_requests_since.return_value = [mock_captured] + + inst = LangGraphInstrumentor() + inst.install(tracker) + + def node(state: dict): + return {"r": 1} + + g = make_graph() + g.add_node("retriever", node) + app = _compile_singleton_graph(g, "retriever") + + app.invoke({}) + + # Check the output payload passed to on_node_end + out_payload = tracker.on_node_end.call_args[0][1] + + # The response should be a list (array) with page_content instead of text_content + assert isinstance(out_payload, list) + assert len(out_payload) == 1 + assert out_payload[0]["page_content"] == "Document content here" + assert "text_content" not in out_payload[0] + + +def test_inference_call_still_sets_llm_metadata(tracker, interceptor): + """Test that inference calls still set is_llm_call metadata.""" + # Create a mock captured request for inference + mock_captured = MagicMock() + mock_captured.url = "https://inference.do-ai.run/v1/chat/completions" + mock_captured.request_payload = {"messages": [{"role": "user", "content": "Hello"}]} + mock_captured.response_payload = {"choices": [{"message": {"content": "Hi!"}}]} + + interceptor.hits_since.return_value = 1 + interceptor.get_captured_requests_since.return_value = [mock_captured] + + inst = LangGraphInstrumentor() + inst.install(tracker) + + def node(state: dict): + return {"r": 1} + + g = make_graph() + g.add_node("llm", node) + app = _compile_singleton_graph(g, "llm") + + app.invoke({}) + + # NodeExecution record is arg0 to on_node_end + exec_rec = tracker.on_node_end.call_args[0][0] + assert exec_rec.metadata.get("is_llm_call") is True + assert 
exec_rec.metadata.get("is_retriever_call") is None or exec_rec.metadata.get("is_retriever_call") is False + + +def test_get_captured_payloads_with_type_inference_url(): + """Test _get_captured_payloads_with_type correctly identifies inference URLs.""" + mock_intr = MagicMock() + mock_captured = MagicMock() + mock_captured.url = "https://inference.do-ai.run/v1/chat" + mock_captured.request_payload = {"messages": []} + mock_captured.response_payload = {"choices": []} + + mock_intr.get_captured_requests_since.return_value = [mock_captured] + + req, resp, is_llm, is_retriever = _get_captured_payloads_with_type(mock_intr, 0) + + assert req == {"messages": []} + assert resp == {"choices": []} + assert is_llm is True + assert is_retriever is False + + +def test_get_captured_payloads_with_type_kbaas_url(): + """Test _get_captured_payloads_with_type correctly identifies KBaaS URLs.""" + mock_intr = MagicMock() + mock_captured = MagicMock() + mock_captured.url = "https://kbaas.do-ai.run/retrieve" + mock_captured.request_payload = {"query": "test"} + mock_captured.response_payload = {"results": []} + + mock_intr.get_captured_requests_since.return_value = [mock_captured] + + req, resp, is_llm, is_retriever = _get_captured_payloads_with_type(mock_intr, 0) + + assert req == {"query": "test"} + assert resp == {"results": []} + assert is_llm is False + assert is_retriever is True + + +def test_get_captured_payloads_with_type_no_captures(): + """Test _get_captured_payloads_with_type when no requests captured.""" + mock_intr = MagicMock() + mock_intr.get_captured_requests_since.return_value = [] + + req, resp, is_llm, is_retriever = _get_captured_payloads_with_type(mock_intr, 0) + + assert req is None + assert resp is None + assert is_llm is False + assert is_retriever is False \ No newline at end of file diff --git a/tests/runtime/network_interceptor_test.py b/tests/runtime/network_interceptor_test.py index 6f79520..7b3531f 100644 --- a/tests/runtime/network_interceptor_test.py +++ b/tests/runtime/network_interceptor_test.py @@ -11,6 +11,10 @@ setup_digitalocean_interception, create_adk_user_agent_hook, RequestHook, + is_inference_url, + is_kbaas_url, + INFERENCE_URL_PATTERNS, + KBAAS_URL_PATTERNS, ) @@ -467,4 +471,136 @@ def test_setup_digitalocean_interception_registers_ua_hook(): result = intr._apply_request_hooks("https://inference.do-ai.run/v1/chat", headers) # Should completely replace with Gradient/adk/{version} format - assert result["User-Agent"].startswith("Gradient/adk/") \ No newline at end of file + assert result["User-Agent"].startswith("Gradient/adk/") + + +# ---- URL Classification Helper Tests ---- + + +def test_is_inference_url_prod(): + """Test that is_inference_url correctly identifies production inference URLs.""" + assert is_inference_url("https://inference.do-ai.run/v1/chat/completions") is True + assert is_inference_url("https://api.inference.do-ai.run/v1/chat") is True + + +def test_is_inference_url_test(): + """Test that is_inference_url correctly identifies test inference URLs.""" + assert is_inference_url("https://inference.do-ai-test.run/v1/chat/completions") is True + assert is_inference_url("https://api.inference.do-ai-test.run/v1/chat") is True + + +def test_is_inference_url_negative(): + """Test that is_inference_url returns False for non-inference URLs.""" + assert is_inference_url("https://example.com/api") is False + assert is_inference_url("https://kbaas.do-ai.run/retrieve") is False + assert is_inference_url("https://openai.com/v1/chat") is False + assert 
is_inference_url(None) is False + assert is_inference_url("") is False + + +def test_is_kbaas_url_prod(): + """Test that is_kbaas_url correctly identifies production KBaaS URLs.""" + assert is_kbaas_url("https://kbaas.do-ai.run/retrieve") is True + assert is_kbaas_url("https://api.kbaas.do-ai.run/v1/retrieve") is True + + +def test_is_kbaas_url_test(): + """Test that is_kbaas_url correctly identifies test KBaaS URLs.""" + assert is_kbaas_url("https://kbaas.do-ai-test.run/retrieve") is True + assert is_kbaas_url("https://api.kbaas.do-ai-test.run/v1/retrieve") is True + + +def test_is_kbaas_url_negative(): + """Test that is_kbaas_url returns False for non-KBaaS URLs.""" + assert is_kbaas_url("https://example.com/api") is False + assert is_kbaas_url("https://inference.do-ai.run/v1/chat") is False + assert is_kbaas_url("https://openai.com/v1/embeddings") is False + assert is_kbaas_url(None) is False + assert is_kbaas_url("") is False + + +def test_url_patterns_are_mutually_exclusive(): + """Test that inference and KBaaS URL patterns don't overlap.""" + # Inference URLs should not be identified as KBaaS + for pattern in INFERENCE_URL_PATTERNS: + test_url = f"https://{pattern}/v1/chat" + assert is_inference_url(test_url) is True + assert is_kbaas_url(test_url) is False + + # KBaaS URLs should not be identified as inference + for pattern in KBAAS_URL_PATTERNS: + test_url = f"https://{pattern}/retrieve" + assert is_kbaas_url(test_url) is True + assert is_inference_url(test_url) is False + + +# ---- KBaaS Endpoint Pattern Tests ---- + + +def test_setup_digitalocean_interception_includes_kbaas_patterns(): + """Test that setup_digitalocean_interception includes KBaaS endpoint patterns.""" + setup_digitalocean_interception() + intr = get_network_interceptor() + + with intr._lock: + patterns = set(intr._tracked_endpoints) + active = intr._active + + assert active + + # Check inference patterns + assert "inference.do-ai.run" in patterns + assert "inference.do-ai-test.run" in patterns + + # Check KBaaS patterns + assert "kbaas.do-ai.run" in patterns + assert "kbaas.do-ai-test.run" in patterns + + +def test_kbaas_request_is_tracked(): + """Test that KBaaS requests are tracked by the interceptor.""" + setup_digitalocean_interception() + intr = get_network_interceptor() + + token = intr.snapshot_token() + + # Record a KBaaS request + intr._record_request("https://kbaas.do-ai.run/v1/retrieve", {"query": "test"}) + + assert intr.hits_since(token) == 1 + + # Check that URL is captured + captured = intr.get_captured_requests_since(token) + assert len(captured) == 1 + assert captured[0].url == "https://kbaas.do-ai.run/v1/retrieve" + assert captured[0].request_payload == {"query": "test"} + + +def test_kbaas_user_agent_hook_applied(): + """Test that User-Agent hook is applied to KBaaS requests.""" + setup_digitalocean_interception() + intr = get_network_interceptor() + + headers = {"User-Agent": "TestClient/1.0"} + result = intr._apply_request_hooks("https://kbaas.do-ai.run/retrieve", headers) + + # Should completely replace with Gradient/adk/{version} format + assert result["User-Agent"].startswith("Gradient/adk/") + + +# ---- Captured Request URL Tests ---- + + +def test_captured_request_includes_url(intr): + """Test that CapturedRequest objects include the URL.""" + intr.add_endpoint_pattern("test.example.com") + + url = "https://test.example.com/api/endpoint" + payload = {"key": "value"} + + intr._record_request(url, payload) + + captured = intr.get_captured_requests_since(0) + assert len(captured) == 1 + 
assert captured[0].url == url + assert captured[0].request_payload == payload \ No newline at end of file
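
Illustrative sketch (not part of the patch): how the helpers added above are meant to fit together once a KBaaS call has been captured. The response payload below is a hypothetical hierarchical-KB result shaped like the test fixtures; only functions introduced in this diff are used.

    # Illustrative only -- mirrors the test fixtures above; not part of the patch.
    from gradient_adk.runtime.network_interceptor import is_inference_url, is_kbaas_url
    from gradient_adk.runtime.langgraph.langgraph_instrumentor import _transform_kbaas_response

    # A captured KBaaS URL is classified as a retriever call, not an LLM call.
    url = "https://kbaas.do-ai.run/v1/retrieve"
    assert is_kbaas_url(url) and not is_inference_url(url)

    # Hierarchical KB: parent_chunk_text is promoted to page_content and the
    # embedded chunk is preserved as embedded_content.
    response = {
        "results": [
            {
                "metadata": {"source": "doc1.pdf"},
                "text_content": "Embedded chunk.",
                "parent_chunk_text": "Full parent context.",
            }
        ],
        "total_results": 1,
    }
    assert _transform_kbaas_response(response) == [
        {
            "metadata": {"source": "doc1.pdf"},
            "page_content": "Full parent context.",
            "embedded_content": "Embedded chunk.",
        }
    ]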