diff --git a/adapters_examples/langgraph/langgraph_graph.py b/adapters_examples/langgraph/langgraph_graph.py
index 5b8d40c1..45386b24 100644
--- a/adapters_examples/langgraph/langgraph_graph.py
+++ b/adapters_examples/langgraph/langgraph_graph.py
@@ -45,9 +45,10 @@
from langchain_openai import ChatOpenAI
from langgraph.graph import END, START, StateGraph
from langgraph.prebuilt import ToolNode
-from langgraph_agentspec_adapter.agentspecexporter import AgentSpecExporter
from pydantic import SecretStr
+from pyagentspec.adapters.langgraph import AgentSpecExporter
+
class MessagesState(TypedDict):
messages: list[Any]
diff --git a/docs/pyagentspec/source/_static/icons/autogen-adapter.jpg b/docs/pyagentspec/source/_static/icons/autogen-adapter.jpg
new file mode 100644
index 00000000..6439249b
Binary files /dev/null and b/docs/pyagentspec/source/_static/icons/autogen-adapter.jpg differ
diff --git a/docs/pyagentspec/source/_static/icons/crewai-adapter.jpg b/docs/pyagentspec/source/_static/icons/crewai-adapter.jpg
new file mode 100644
index 00000000..937a2bbd
Binary files /dev/null and b/docs/pyagentspec/source/_static/icons/crewai-adapter.jpg differ
diff --git a/docs/pyagentspec/source/_static/icons/langgraph-adapter.jpg b/docs/pyagentspec/source/_static/icons/langgraph-adapter.jpg
new file mode 100644
index 00000000..964d2014
Binary files /dev/null and b/docs/pyagentspec/source/_static/icons/langgraph-adapter.jpg differ
diff --git a/docs/pyagentspec/source/_static/icons/wayflow-adapter.jpg b/docs/pyagentspec/source/_static/icons/wayflow-adapter.jpg
new file mode 100644
index 00000000..d36461bf
Binary files /dev/null and b/docs/pyagentspec/source/_static/icons/wayflow-adapter.jpg differ
diff --git a/docs/pyagentspec/source/_templates/navbar-new.html b/docs/pyagentspec/source/_templates/navbar-new.html
index be6bf8ce..f59be579 100644
--- a/docs/pyagentspec/source/_templates/navbar-new.html
+++ b/docs/pyagentspec/source/_templates/navbar-new.html
@@ -13,8 +13,8 @@
Release Notes
-
- Reference Sheet
+
+ Adapters
Ecosystem
diff --git a/docs/pyagentspec/source/adapters/autogen.rst b/docs/pyagentspec/source/adapters/autogen.rst
new file mode 100644
index 00000000..0807aa1c
--- /dev/null
+++ b/docs/pyagentspec/source/adapters/autogen.rst
@@ -0,0 +1,61 @@
+.. _autogenadapter:
+
+=============================
+Agent Spec Adapters - AutoGen
+=============================
+
+
+.. figure:: ../_static/icons/autogen-adapter.jpg
+ :align: center
+ :scale: 20%
+ :alt: Agent Spec adapter for AutoGen
+
+ ↑ With the **Agent Spec adapter for AutoGen**, you can easily import agents from external frameworks using Agent Spec and run them with AutoGen.
+
+
+*Microsoft AutoGen supports the development of multi-agent conversational systems,
+allowing agents to communicate and collaborate to solve tasks.*
+
+
+Get started
+===========
+
+To get started, set up your Python environment (Python 3.10 to 3.12 required),
+and then install the PyAgentSpec package with the AutoGen extension.
+
+
+.. code-block:: bash
+
+ python -m venv .venv
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
+ pip install "pyagentspec[autogen]"
+
+
+You are now ready to use the adapter:
+
+- Run Agent Spec configurations with AutoGen (see more details :ref:`below <spectoautogen>`)
+- Convert AutoGen agents to Agent Spec (see more details :ref:`below <autogentospec>`)
+
+
+
+.. _spectoautogen:
+
+Run Agent Spec configurations with AutoGen
+==========================================
+
+
+.. literalinclude:: ../code_examples/adapter_autogen_quickstart.py
+ :language: python
+ :start-after: .. start-agentspec_to_runtime
+ :end-before: .. end-agentspec_to_runtime
+
+
+.. _autogentospec:
+
+Convert AutoGen agents to Agent Spec
+====================================
+
+.. literalinclude:: ../code_examples/adapter_autogen_quickstart.py
+ :language: python
+ :start-after: .. start-runtime_to_agentspec
+ :end-before: .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/adapters/crewai.rst b/docs/pyagentspec/source/adapters/crewai.rst
new file mode 100644
index 00000000..bc5937c6
--- /dev/null
+++ b/docs/pyagentspec/source/adapters/crewai.rst
@@ -0,0 +1,60 @@
+.. _crewaiadapter:
+
+============================
+Agent Spec Adapters - CrewAI
+============================
+
+
+.. figure:: ../_static/icons/crewai-adapter.jpg
+ :align: center
+ :scale: 18%
+ :alt: Agent Spec adapter for CrewAI
+
+ ↑ With the **Agent Spec adapter for CrewAI**, you can easily import agents from external frameworks using Agent Spec and run them with CrewAI.
+
+*CrewAI enables the design of collaborative AI agents and workflows, incorporating guardrails, memory,
+and observability for production-ready multi-agent systems.*
+
+
+Get started
+===========
+
+To get started, set up your Python environment (Python 3.10 to 3.13 required),
+and then install the PyAgentSpec package with the CrewAI extension.
+
+
+.. code-block:: bash
+
+ python -m venv .venv
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
+ pip install "pyagentspec[crewai]"
+
+
+You are now ready to use the adapter:
+
+- Run Agent Spec configurations with CrewAI (see more details :ref:`below <spectocrewai>`)
+- Convert CrewAI agents to Agent Spec (see more details :ref:`below <crewaitospec>`)
+
+
+
+.. _spectocrewai:
+
+Run Agent Spec configurations with CrewAI
+=========================================
+
+
+.. literalinclude:: ../code_examples/adapter_crewai_quickstart.py
+ :language: python
+ :start-after: .. start-agentspec_to_runtime
+ :end-before: .. end-agentspec_to_runtime
+
+
+.. _crewaitospec:
+
+Convert CrewAI agents to Agent Spec
+===================================
+
+.. literalinclude:: ../code_examples/adapter_crewai_quickstart.py
+ :language: python
+ :start-after: .. start-runtime_to_agentspec
+ :end-before: .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/adapters/langgraph.rst b/docs/pyagentspec/source/adapters/langgraph.rst
new file mode 100644
index 00000000..7ad1cfe9
--- /dev/null
+++ b/docs/pyagentspec/source/adapters/langgraph.rst
@@ -0,0 +1,60 @@
+.. _langgraphadapter:
+
+===============================
+Agent Spec Adapters - LangGraph
+===============================
+
+
+.. figure:: ../_static/icons/langgraph-adapter.jpg
+ :align: center
+ :scale: 18%
+ :alt: Agent Spec adapter for LangGraph
+
+ ↑ With the **Agent Spec adapter for LangGraph**, you can easily import agents from external frameworks using Agent Spec and run them with LangGraph.
+
+
+*LangGraph facilitates the creation and management of long-running, stateful agents
+with durable execution and human-in-the-loop capabilities.*
+
+
+Get started
+===========
+
+To get started, set up your Python environment (Python 3.10 or newer required),
+and then install the PyAgentSpec package with the LangGraph extension.
+
+
+.. code-block:: bash
+
+ python -m venv .venv
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
+ pip install "pyagentspec[langgraph]"
+
+
+You are now ready to use the adapter:
+
+- Run Agent Spec configurations with LangGraph (see more details :ref:`below <spectolanggraph>`)
+- Convert LangGraph agents to Agent Spec (see more details :ref:`below <langgraphtospec>`)
+
+
+.. _spectolanggraph:
+
+Run Agent Spec configurations with LangGraph
+============================================
+
+
+.. literalinclude:: ../code_examples/adapter_langgraph_quickstart.py
+ :language: python
+ :start-after: .. start-agentspec_to_runtime
+ :end-before: .. end-agentspec_to_runtime
+
+
+.. _langgraphtospec:
+
+Convert LangGraph agents to Agent Spec
+======================================
+
+.. literalinclude:: ../code_examples/adapter_langgraph_quickstart.py
+ :language: python
+ :start-after: .. start-runtime_to_agentspec
+ :end-before: .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/adapters/wayflow.rst b/docs/pyagentspec/source/adapters/wayflow.rst
new file mode 100644
index 00000000..670482bc
--- /dev/null
+++ b/docs/pyagentspec/source/adapters/wayflow.rst
@@ -0,0 +1,61 @@
+.. _wayflowadapter:
+
+=============================
+Agent Spec Adapters - WayFlow
+=============================
+
+
+.. figure:: ../_static/icons/wayflow-adapter.jpg
+ :align: center
+ :scale: 18%
+ :alt: Agent Spec adapter for WayFlow
+
+ ↑ With the **Agent Spec adapter for WayFlow**, you can easily import agents from external frameworks using Agent Spec and run them with WayFlow.
+
+
+*WayFlow, the reference framework for Agent Spec, provides modular components for developing AI-powered assistants,
+supporting both workflow-based and agent-style applications.*
+
+
+Get started
+===========
+
+To get started, set up your Python environment (Python 3.10 or newer required),
+and then install the PyAgentSpec package as well as WayFlowCore.
+
+
+.. code-block:: bash
+
+ python -m venv .venv
+ source .venv/bin/activate # On Windows: .venv\Scripts\activate
+ pip install pyagentspec wayflowcore
+
+
+You are now ready to use the adapter:
+
+- Run Agent Spec configurations with WayFlow (see more details :ref:`below <spectowayflow>`)
+- Convert WayFlow agents to Agent Spec (see more details :ref:`below <wayflowtospec>`)
+
+
+
+.. _spectowayflow:
+
+Run Agent Spec configurations with WayFlow
+==========================================
+
+
+.. literalinclude:: ../code_examples/adapter_wayflow_quickstart.py
+ :language: python
+ :start-after: .. start-agentspec_to_runtime
+ :end-before: .. end-agentspec_to_runtime
+
+
+.. _wayflowtospec:
+
+Convert WayFlow agents to Agent Spec
+====================================
+
+.. literalinclude:: ../code_examples/adapter_wayflow_quickstart.py
+ :language: python
+ :start-after: .. start-runtime_to_agentspec
+ :end-before: .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/changelog.rst b/docs/pyagentspec/source/changelog.rst
index 190523f9..2b190b61 100644
--- a/docs/pyagentspec/source/changelog.rst
+++ b/docs/pyagentspec/source/changelog.rst
@@ -33,13 +33,13 @@ New features
* **Sensitive Fields Support:**
- New fields have been added to Agent Spec components that may carry sensitive data (e.g. the field `api_key` on :ref:`OpenAICompatibleModel `). To provide this functionality securely, we also introduced the annotation `SensitiveField` such that the sensitive fields are automatically excluded when exporting a Component to its JSON or yaml configuration.
+ New fields have been added to Agent Spec components that may carry sensitive data (e.g. the field `api_key` on :ref:`OpenAiCompatibleConfig `). To provide this functionality securely, we also introduced the annotation `SensitiveField` such that the sensitive fields are automatically excluded when exporting a Component to its JSON or yaml configuration.
For more information read the :ref:`latest specification `.
* **OpenAI Responses API Support:**
- :ref:`OpenAICompatibleModel ` and :ref:`OpenAIModel ` now support the OpenAI Responses API, which can be configured
+ :ref:`OpenAiCompatibleConfig ` and :ref:`OpenAIModel ` now support the OpenAI Responses API, which can be configured
using the ``api_type`` parameter, which accepts values from :ref:`OpenAIAPIType `.
This enhancement allows recent OpenAI models to better leverage advanced reasoning capabilities, resulting in significant performance improvements in workflows.
diff --git a/docs/pyagentspec/source/code_examples/adapter_autogen_quickstart.py b/docs/pyagentspec/source/code_examples/adapter_autogen_quickstart.py
new file mode 100644
index 00000000..22936592
--- /dev/null
+++ b/docs/pyagentspec/source/code_examples/adapter_autogen_quickstart.py
@@ -0,0 +1,94 @@
+# Copyright © 2025 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+# isort:skip_file
+# fmt: off
+# mypy: ignore-errors
+
+# .. start-agentspec_to_runtime
+# Create an Agent Spec agent
+from pyagentspec.agent import Agent
+from pyagentspec.llms.openaicompatibleconfig import OpenAiCompatibleConfig
+from pyagentspec.property import FloatProperty
+from pyagentspec.tools import ServerTool
+
+subtraction_tool = ServerTool(
+ name="subtraction-tool",
+ description="subtract two numbers together",
+ inputs=[FloatProperty(title="a"), FloatProperty(title="b")],
+ outputs=[FloatProperty(title="difference")],
+)
+
+agentspec_llm_config = OpenAiCompatibleConfig(
+ name="llama-3.3-70b-instruct",
+ model_id="/storage/models/Llama-3.3-70B-Instruct",
+ url="url.to.my.llm",
+)
+
+agentspec_agent = Agent(
+ name="agentspec_tools_test",
+ description="agentspec_tools_test",
+ llm_config=agentspec_llm_config,
+ system_prompt="Perform subtraction with the given tool.",
+ tools=[subtraction_tool],
+)
+
+# Export the Agent Spec configuration
+from pyagentspec.serialization import AgentSpecSerializer
+
+agentspec_config = AgentSpecSerializer().to_json(agentspec_agent)
+
+# Load and run the Agent Spec configuration with AutoGen
+from pyagentspec.adapters.autogen import AgentSpecLoader
+
+def subtract(a: float, b: float) -> float:
+ return a - b
+
+async def main() -> None:
+ converter = AgentSpecLoader(tool_registry={"subtraction-tool": subtract})
+ component = converter.load_json(agentspec_config)
+ while True:
+ input_cmd = input("USER >> ")
+ if input_cmd == "q":
+ break
+ result = await component.run(task=input_cmd)
+ print(f"AGENT >> {result.messages[-1].content}")
+ await component._model_client.close()
+
+# anyio.run(main)
+# USER >> Compute 987654321-123456789
+# AGENT >> The result of the subtraction is 864197532.
+# .. end-agentspec_to_runtime
+# .. start-runtime_to_agentspec
+# Create an AutoGen Agent
+import os
+os.environ["OPENAI_API_KEY"] = "YOUR_API_KEY"
+from autogen_agentchat.agents import AssistantAgent
+from autogen_ext.models.openai import OpenAIChatCompletionClient
+
+async def add_tool(a: int, b: int) -> int:
+ """Adds a to b and returns the result"""
+ return a + b
+
+autogen_tools = {"add_tool": add_tool}
+
+model_client = OpenAIChatCompletionClient(
+ model="gpt-4.1",
+)
+
+autogen_agent = AssistantAgent(
+ name="assistant",
+ model_client=model_client,
+ tools=list(autogen_tools.values()),
+ system_message="Use tools to solve tasks, and reformulate the answers that you get.",
+ reflect_on_tool_use=True,
+)
+
+# Convert to Agent Spec
+from pyagentspec.adapters.autogen import AgentSpecExporter
+
+agentspec_config = AgentSpecExporter().to_json(autogen_agent)
+# .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/code_examples/adapter_crewai_quickstart.py b/docs/pyagentspec/source/code_examples/adapter_crewai_quickstart.py
new file mode 100644
index 00000000..e1ccce27
--- /dev/null
+++ b/docs/pyagentspec/source/code_examples/adapter_crewai_quickstart.py
@@ -0,0 +1,128 @@
+# Copyright © 2025 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+# isort:skip_file
+# fmt: off
+# mypy: ignore-errors
+
+try:
+ import crewai # noqa: F401
+except ImportError:
+ exit() # Not installed
+except RuntimeError as e:
+ if "Your system has an unsupported version of sqlite3" in str(e):
+ # ChromaDB requires a version of SQLite which is not always supported
+ __import__("pysqlite3")
+ import sys
+ sys.modules["sqlite3"] = sys.modules.pop("pysqlite3")
+ else:
+ raise e # other error
+
+# .. start-agentspec_to_runtime
+# Create an Agent Spec agent
+from pyagentspec.agent import Agent
+from pyagentspec.llms.openaicompatibleconfig import OpenAiCompatibleConfig
+from pyagentspec.property import FloatProperty
+from pyagentspec.tools import ServerTool
+
+subtraction_tool = ServerTool(
+ name="subtraction-tool",
+ description="subtract two numbers together",
+ inputs=[FloatProperty(title="a"), FloatProperty(title="b")],
+ outputs=[FloatProperty(title="difference")],
+)
+
+agentspec_llm_config = OpenAiCompatibleConfig(
+ name="llama-3.3-70b-instruct",
+ model_id="/storage/models/Llama-3.3-70B-Instruct",
+ url="url.to.my.llm",
+)
+
+agentspec_agent = Agent(
+ name="agentspec_tools_test",
+ description="agentspec_tools_test",
+ llm_config=agentspec_llm_config,
+ system_prompt="Perform subtraction with the given tool.",
+ tools=[subtraction_tool],
+)
+
+# Export the Agent Spec configuration
+from pyagentspec.serialization import AgentSpecSerializer
+
+agentspec_config = AgentSpecSerializer().to_json(agentspec_agent)
+
+# Load and run the Agent Spec configuration with CrewAI
+import os
+os.environ["CREWAI_DISABLE_TELEMETRY"] = "true"
+from crewai import Crew, Task
+from pyagentspec.adapters.crewai import AgentSpecLoader
+
+def subtract(a: float, b: float) -> float:
+ return a - b
+
+async def main():
+ loader = AgentSpecLoader(tool_registry={"subtraction-tool": subtract})
+ assistant = loader.load_json(agentspec_config)
+
+ while True:
+ task = Task(
+ description="{user_input}",
+ expected_output="A helpful, concise reply to the user.",
+ agent=assistant,
+ async_execution=True
+ )
+ crew = Crew(agents=[assistant], tasks=[task])
+ user_input = input("USER >> ")
+ if user_input == "exit":
+ break
+ response = await crew.kickoff_async(inputs={"user_input": user_input})
+ print(f"AGENT >> {response}")
+
+
+# anyio.run(main)
+# USER >> Compute 987654321-123456789
+# AGENT >> 864197532
+# .. end-agentspec_to_runtime
+# .. start-runtime_to_agentspec
+# Create a CrewAI Agent
+from crewai import LLM, Agent
+from crewai.tools.base_tool import Tool
+from pydantic import BaseModel
+
+class InputSchema(BaseModel):
+ a: float
+ b: float
+
+def subtract(a: float, b: float) -> float:
+ """Subtract two numbers"""
+ return a - b
+
+llm = LLM(
+ model="hosted_vllm/Llama-4-Maverick",
+ api_base="http://url.to.my.llama.model/v1",
+ max_tokens=512,
+)
+
+crewai_agent = Agent(
+ role="Calculator agent",
+ goal="Computes the mathematical operation prompted by the user",
+ backstory="You are a calculator with 20 years of experience",
+ llm=llm,
+ tools=[
+ Tool(
+ name="subtract",
+ description="Subtract two numbers",
+ args_schema=InputSchema,
+ func=subtract,
+ ),
+ ],
+)
+
+# Convert to Agent Spec
+from pyagentspec.adapters.crewai import AgentSpecExporter
+
+agentspec_config = AgentSpecExporter().to_json(crewai_agent)
+# .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/code_examples/adapter_langgraph_quickstart.py b/docs/pyagentspec/source/code_examples/adapter_langgraph_quickstart.py
new file mode 100644
index 00000000..9f771262
--- /dev/null
+++ b/docs/pyagentspec/source/code_examples/adapter_langgraph_quickstart.py
@@ -0,0 +1,120 @@
+# Copyright © 2025 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+# isort:skip_file
+# fmt: off
+# mypy: ignore-errors
+
+# .. start-agentspec_to_runtime
+# Create an Agent Spec agent
+from pyagentspec.agent import Agent
+from pyagentspec.llms.openaicompatibleconfig import OpenAiCompatibleConfig
+from pyagentspec.property import FloatProperty
+from pyagentspec.tools import ServerTool
+
+subtraction_tool = ServerTool(
+ name="subtraction-tool",
+ description="subtract two numbers together",
+ inputs=[FloatProperty(title="a"), FloatProperty(title="b")],
+ outputs=[FloatProperty(title="difference")],
+)
+
+agentspec_llm_config = OpenAiCompatibleConfig(
+ name="llama-3.3-70b-instruct",
+ model_id="/storage/models/Llama-3.3-70B-Instruct",
+ url="url.to.my.llm",
+)
+
+agentspec_agent = Agent(
+ name="agentspec_tools_test",
+ description="agentspec_tools_test",
+ llm_config=agentspec_llm_config,
+ system_prompt="Perform subtraction with the given tool.",
+ tools=[subtraction_tool],
+)
+
+# Export the Agent Spec configuration
+from pyagentspec.serialization import AgentSpecSerializer
+
+agentspec_config = AgentSpecSerializer().to_json(agentspec_agent)
+
+# Load and run the Agent Spec configuration with LangGraph
+from pyagentspec.adapters.langgraph import AgentSpecLoader
+
+def subtract(a: float, b: float) -> float:
+ return a - b
+
+async def main():
+ loader = AgentSpecLoader(tool_registry={"subtraction-tool": subtract})
+ assistant = loader.load_json(agentspec_config)
+
+ while True:
+ user_input = input("USER >> ")
+ if user_input == "exit":
+ break
+ result = await assistant.ainvoke(
+ input={"messages": [{"role": "user", "content": user_input}]},
+ )
+ print(f"AGENT >> {result['messages'][-1].content}")
+
+
+# anyio.run(main)
+# USER >> Compute 987654321-123456789
+# AGENT >> The result of this subtraction is 864197532.
+# .. end-agentspec_to_runtime
+# .. start-runtime_to_agentspec
+# Create a LangGraph Agent
+from typing_extensions import Any, TypedDict
+from langchain_openai.chat_models import ChatOpenAI
+from langgraph.graph import END, START, StateGraph
+from pydantic import SecretStr
+
+class InputSchema(TypedDict):
+ city: str
+
+class OutputSchema(TypedDict):
+ response: Any
+
+class InternalState(TypedDict):
+ weather_data: str
+
+def get_weather(state: InputSchema) -> InternalState:
+ """Returns the weather in a specific city.
+ Args
+ ----
+ city: The city to check the weather for
+
+ Returns
+ -------
+ weather: The weather in that city
+ """
+ return {"weather_data": f"The weather in {state['city']} is sunny."}
+
+def llm_node(state: InternalState) -> OutputSchema:
+ model = ChatOpenAI(
+ base_url="your.url.to.llm/v1",
+ model="/storage/models/Llama-3.1-70B-Instruct",
+ api_key=SecretStr("t"),
+ )
+ result = model.invoke(
+ f"Reformulate the following sentence to the user: {state['weather_data']}"
+ )
+ return {"response": result.content}
+
+graph = StateGraph(InternalState, input_schema=InputSchema, output_schema=OutputSchema)
+graph.add_node("get_weather", get_weather)
+graph.add_node("llm_node", llm_node)
+graph.add_edge(START, "get_weather")
+graph.add_edge("get_weather", "llm_node")
+graph.add_edge("llm_node", END)
+assistant_name = "Weather Flow"
+langgraph_agent = graph.compile(name=assistant_name)
+
+# Convert to Agent Spec
+from pyagentspec.adapters.langgraph import AgentSpecExporter
+
+agentspec_config = AgentSpecExporter().to_json(langgraph_agent)
+# .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/code_examples/adapter_wayflow_quickstart.py b/docs/pyagentspec/source/code_examples/adapter_wayflow_quickstart.py
new file mode 100644
index 00000000..3d2735ef
--- /dev/null
+++ b/docs/pyagentspec/source/code_examples/adapter_wayflow_quickstart.py
@@ -0,0 +1,98 @@
+# Copyright © 2025 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+# isort:skip_file
+# fmt: off
+# mypy: ignore-errors
+
+exit() # wayflow not installed
+# .. start-agentspec_to_runtime
+# Create an Agent Spec agent
+from pyagentspec.agent import Agent
+from pyagentspec.llms.openaicompatibleconfig import OpenAiCompatibleConfig
+from pyagentspec.property import FloatProperty
+from pyagentspec.tools import ServerTool
+
+subtraction_tool = ServerTool(
+ name="subtraction-tool",
+ description="subtract two numbers together",
+ inputs=[FloatProperty(title="a"), FloatProperty(title="b")],
+ outputs=[FloatProperty(title="difference")],
+)
+
+agentspec_llm_config = OpenAiCompatibleConfig(
+ name="llama-3.3-70b-instruct",
+ model_id="/storage/models/Llama-3.3-70B-Instruct",
+ url="url.to.my.llm",
+)
+
+agentspec_agent = Agent(
+ name="agentspec_tools_test",
+ description="agentspec_tools_test",
+ llm_config=agentspec_llm_config,
+ system_prompt="Perform subtraction with the given tool.",
+ tools=[subtraction_tool],
+)
+
+# Export the Agent Spec configuration
+from pyagentspec.serialization import AgentSpecSerializer
+
+agentspec_config = AgentSpecSerializer().to_json(agentspec_agent)
+
+# Load and run the Agent Spec configuration with WayFlow
+from wayflowcore.agentspec import AgentSpecLoader
+
+def subtract(a: float, b: float) -> float:
+ return a - b
+
+async def main():
+ converter = AgentSpecLoader(tool_registry={"subtraction-tool": subtract})
+ assistant = converter.load_json(agentspec_config)
+ conversation = assistant.start_conversation()
+
+ while True:
+ user_input = input("USER >> ")
+ if user_input == "exit":
+ break
+ conversation.append_user_message(user_input)
+ await conversation.execute_async()
+ last = conversation.get_last_message()
+ print(f"AGENT >> {last.content}")
+
+# anyio.run(main)
+# USER >> Compute 987654321-123456789
+# AGENT >> The result of the subtraction is 864197532.
+# .. end-agentspec_to_runtime
+# .. start-runtime_to_agentspec
+# Create a WayFlow Agent
+from wayflowcore.agent import Agent
+from wayflowcore.models import OpenAICompatibleModel
+from wayflowcore.tools import tool
+
+@tool("subtraction-tool", description_mode="only_docstring")
+def subtraction_tool(a: float, b: float) -> float:
+ """subtract two numbers together"""
+ return a - b
+
+llm = OpenAICompatibleModel(
+ name="llama-3.3-70b-instruct",
+ model_id="/storage/models/Llama-3.3-70B-Instruct",
+ base_url="url.to.my.llm",
+)
+
+wayflow_agent = Agent(
+ name="wayflow_agent",
+ description="Simple agent with a tool.",
+ llm=llm,
+ custom_instruction="Perform subtraction with the given tool.",
+ tools=[subtraction_tool],
+)
+
+# Convert to Agent Spec
+from wayflowcore.agentspec import AgentSpecExporter
+
+agentspec_config = AgentSpecExporter().to_json(wayflow_agent)
+# .. end-runtime_to_agentspec
diff --git a/docs/pyagentspec/source/code_examples/wayflow_cross_framework_agent.py b/docs/pyagentspec/source/code_examples/wayflow_cross_framework_agent.py
index 7e3d6a0c..f095ff8d 100644
--- a/docs/pyagentspec/source/code_examples/wayflow_cross_framework_agent.py
+++ b/docs/pyagentspec/source/code_examples/wayflow_cross_framework_agent.py
@@ -130,16 +130,31 @@ def convert_agentspec_to_wayflow(agentspec_component: Component):
status = conversation.execute()
# .. end-conversation
+# .. using-langgraph-agentspec-adapter:
+# Load the Agent Spec component into a LangGraph assistant
+from pyagentspec.adapters.langgraph import AgentSpecLoader as LangGraphLoader
+
+loader = LangGraphLoader(tool_registry=tool_registry)
+agent = loader.load_component(deserialized_agentspec_agent)
+# .. end-using-langgraph-agentspec-adapter:
+# .. using-crewai-agentspec-adapter:
+# Load the Agent Spec component into a CrewAI assistant
+from pyagentspec.adapters.crewai import AgentSpecLoader as CrewAILoader
+
+loader = CrewAILoader(tool_registry=tool_registry)
+agent = loader.load_component(deserialized_agentspec_agent)
+# .. end-using-crewai-agentspec-adapter:
+# .. using-autogen-agentspec-adapter:
+# Load the Agent Spec component into an AutoGen assistant
+from pyagentspec.adapters.autogen import AgentSpecLoader as AutoGenLoader
+
+loader = AutoGenLoader(tool_registry=tool_registry)
+agent = loader.load_component(deserialized_agentspec_agent)
+# .. end-using-autogen-agentspec-adapter:
# .. using-wayflow-agentspec-adapter:
-from wayflowcore.agentspec import AgentSpecLoader
-from wayflowcore.tools import tool
+from wayflowcore.agentspec import AgentSpecLoader as WayFlowLoader
-loader = AgentSpecLoader(
- tool_registry={
- tool_name: tool(tool_function, description_mode="only_docstring")
- for tool_name, tool_function in tool_registry.items()
- }
-)
+loader = WayFlowLoader(tool_registry=tool_registry)
agent = loader.load_component(deserialized_agentspec_agent)
# .. end-using-wayflow-agentspec-adapter:
diff --git a/docs/pyagentspec/source/docs_home.rst b/docs/pyagentspec/source/docs_home.rst
index cd150d82..ce42d913 100644
--- a/docs/pyagentspec/source/docs_home.rst
+++ b/docs/pyagentspec/source/docs_home.rst
@@ -113,6 +113,17 @@ Agent Spec is developed jointly between Oracle Cloud Infrastructure and Oracle L
API Reference
+.. toctree::
+ :maxdepth: 1
+ :caption: Adapters
+ :hidden:
+
+ LangGraph
+ WayFlow
+ CrewAI
+ AutoGen
+
+
.. toctree::
:maxdepth: 1
:caption: Ecosystem
diff --git a/docs/pyagentspec/source/howtoguides/howto_execute_agentspec_across_frameworks.rst b/docs/pyagentspec/source/howtoguides/howto_execute_agentspec_across_frameworks.rst
index 813ee529..2083aab9 100644
--- a/docs/pyagentspec/source/howtoguides/howto_execute_agentspec_across_frameworks.rst
+++ b/docs/pyagentspec/source/howtoguides/howto_execute_agentspec_across_frameworks.rst
@@ -178,8 +178,62 @@ Finally, we can start the conversation with our new agent and execute it.
:end-before: .. end-conversation
-Using the Agent Spec adapter from WayFlow
-=========================================
+
+Using the native Agent Spec adapters
+====================================
+
+The execution of this section requires installing pyagentspec with the extension corresponding
+to the framework you want to use Agent Spec with.
+
+.. tabs::
+
+ .. tab:: LangGraph
+
+ .. code-block:: bash
+
+ # To use this adapter, please install pyagentspec with the "langgraph" extension.
+ pip install "pyagentspec[langgraph]"
+
+ .. literalinclude:: ../code_examples/wayflow_cross_framework_agent.py
+ :language: python
+ :start-after: .. using-langgraph-agentspec-adapter:
+ :end-before: .. end-using-langgraph-agentspec-adapter:
+
+ .. tab:: CrewAI
+
+ .. code-block:: bash
+
+ # To use this adapter, please install pyagentspec with the "crewai" extension.
+ pip install "pyagentspec[crewai]"
+
+ .. literalinclude:: ../code_examples/wayflow_cross_framework_agent.py
+ :language: python
+ :start-after: .. using-crewai-agentspec-adapter:
+ :end-before: .. end-using-crewai-agentspec-adapter:
+
+ .. tab:: AutoGen
+
+
+ .. code-block:: bash
+
+ # To use this adapter, please install pyagentspec with the "autogen" extension.
+ pip install "pyagentspec[autogen]"
+
+ .. literalinclude:: ../code_examples/wayflow_cross_framework_agent.py
+ :language: python
+ :start-after: .. using-autogen-agentspec-adapter:
+ :end-before: .. end-using-autogen-agentspec-adapter:
+
+
+The transformation can be easily performed using this library by using the ``AgentSpecLoader`` object,
+and calling the ``load_json`` method directly on the Agent Spec JSON representation of the agent,
+or the ``load_component`` method on the PyAgentSpec component object.
+
+You can find more information about the Agent Spec adapter in the Agent Spec :doc:`API Documentation <../api/adapters>`.
+
+
+Using the WayFlow Agent Spec adapter
+====================================
The execution of this section requires installing the package ``wayflowcore``.
diff --git a/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst b/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst
index 4606da14..5bc696a4 100644
--- a/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst
+++ b/docs/pyagentspec/source/howtoguides/howto_llm_from_different_providers.rst
@@ -7,7 +7,7 @@ The available LLMs are:
- :ref:`OpenAiConfig `
- :ref:`OciGenAiConfig `
-- :ref:`OpenAiCompatibleConfig `
+- :ref:`OpenAiCompatibleConfig `
- :ref:`VllmConfig `
- :ref:`OllamaConfig `
diff --git a/pyagentspec/setup.py b/pyagentspec/setup.py
index 27fff1bc..ba9b438d 100644
--- a/pyagentspec/setup.py
+++ b/pyagentspec/setup.py
@@ -73,8 +73,8 @@ def read(file_name):
"autogen-agentchat>=0.5.6; python_version < '3.13'",
],
"langgraph": [
- "langgraph>=0.5.3",
- "langchain-core>=0.3",
+ "langgraph>=0.5.3,<1.0.0",
+ "langchain-core>=0.3,<1.0.0",
"langchain-openai>=0.3.7",
"langchain-ollama>=0.3.3",
],
diff --git a/pyagentspec/src/pyagentspec/adapters/autogen/_autogenconverter.py b/pyagentspec/src/pyagentspec/adapters/autogen/_autogenconverter.py
index d57ccbb0..65649e28 100644
--- a/pyagentspec/src/pyagentspec/adapters/autogen/_autogenconverter.py
+++ b/pyagentspec/src/pyagentspec/adapters/autogen/_autogenconverter.py
@@ -7,7 +7,7 @@
import keyword
import re
-from typing import Any, Dict, List, Optional, Union, cast, get_args
+from typing import Any, Dict, List, Optional, cast, get_args
from urllib.parse import urljoin
import httpx
@@ -30,8 +30,10 @@
from pyagentspec.component import Component as AgentSpecComponent
from pyagentspec.llms import LlmConfig as AgentSpecLlmConfig
from pyagentspec.llms.ollamaconfig import OllamaConfig as AgentSpecOllamaModel
+from pyagentspec.llms.openaicompatibleconfig import (
+ OpenAiCompatibleConfig as AgentSpecOpenAiCompatibleModel,
+)
from pyagentspec.llms.openaiconfig import OpenAiConfig as AgentSpecOpenAiConfig
-from pyagentspec.llms.vllmconfig import VllmConfig as AgentSpecVllmModel
from pyagentspec.property import Property as AgentSpecProperty
from pyagentspec.property import _empty_default as _agentspec_empty_default
from pyagentspec.tools import Tool as AgentSpecTool
@@ -157,7 +159,7 @@ def _llm_convert_to_autogen(
) -> AutogenChatCompletionClient:
def _prepare_llm_args(
- agentspec_llm_: Union[AgentSpecVllmModel, AgentSpecOllamaModel],
+ agentspec_llm_: AgentSpecOpenAiCompatibleModel,
) -> Dict[str, Any]:
metadata = getattr(agentspec_llm_, "metadata", {}) or {}
base_url = agentspec_llm_.url
@@ -193,10 +195,10 @@ def _prepare_llm_args(
if isinstance(agentspec_llm, AgentSpecOpenAiConfig):
return AutogenOpenAIChatCompletionClient(model=agentspec_llm.model_id)
- elif isinstance(agentspec_llm, AgentSpecVllmModel):
- return AutogenOpenAIChatCompletionClient(**_prepare_llm_args(agentspec_llm))
elif isinstance(agentspec_llm, AgentSpecOllamaModel):
return AutogenOllamaChatCompletionClient(**_prepare_llm_args(agentspec_llm))
+ elif isinstance(agentspec_llm, AgentSpecOpenAiCompatibleModel):
+ return AutogenOpenAIChatCompletionClient(**_prepare_llm_args(agentspec_llm))
else:
raise NotImplementedError(
f"The provided LlmConfig type `{type(agentspec_llm)}` is not supported in autogen yet."
diff --git a/pyagentspec/src/pyagentspec/adapters/crewai/_crewaiconverter.py b/pyagentspec/src/pyagentspec/adapters/crewai/_crewaiconverter.py
index 92c086eb..13144590 100644
--- a/pyagentspec/src/pyagentspec/adapters/crewai/_crewaiconverter.py
+++ b/pyagentspec/src/pyagentspec/adapters/crewai/_crewaiconverter.py
@@ -139,14 +139,14 @@ def parse_url(url: str) -> str:
llm_parameters: Dict[str, Any] = {}
if isinstance(agentspec_llm, AgentSpecOpenAiConfig):
llm_parameters["model"] = "openai/" + agentspec_llm.model_id
- elif isinstance(agentspec_llm, AgentSpecOpenAiCompatibleConfig):
- llm_parameters["model"] = "openai/" + agentspec_llm.model_id
- llm_parameters["api_base"] = parse_url(agentspec_llm.url)
elif isinstance(agentspec_llm, AgentSpecVllmModel):
# CrewAI uses lite llm underneath:
# https://community.crewai.com/t/help-how-to-use-a-custom-local-llm-with-vllm/5746
llm_parameters["model"] = "hosted_vllm/" + agentspec_llm.model_id
llm_parameters["api_base"] = parse_url(agentspec_llm.url)
+ elif isinstance(agentspec_llm, AgentSpecOpenAiCompatibleConfig):
+ llm_parameters["model"] = "openai/" + agentspec_llm.model_id
+ llm_parameters["api_base"] = parse_url(agentspec_llm.url)
elif isinstance(agentspec_llm, AgentSpecOllamaModel):
llm_parameters["model"] = "ollama/" + agentspec_llm.model_id
llm_parameters["base_url"] = parse_url(agentspec_llm.url)
diff --git a/pyagentspec/tests/adapters/autogen/test_quickstart_example.py b/pyagentspec/tests/adapters/autogen/test_quickstart_example.py
new file mode 100644
index 00000000..d2129259
--- /dev/null
+++ b/pyagentspec/tests/adapters/autogen/test_quickstart_example.py
@@ -0,0 +1,47 @@
+# Copyright © 2025 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+import anyio
+
+from pyagentspec.agent import Agent
+
+
+def test_quickstart_example_runs(quickstart_agent_json: str):
+
+ from pyagentspec.adapters.autogen import AgentSpecLoader
+
+ def subtract(a: float, b: float) -> float:
+ return a - b
+
+ async def main():
+ converter = AgentSpecLoader(tool_registry={"subtraction-tool": subtract})
+ component = converter.load_json(quickstart_agent_json)
+ _ = await component.run(task="Compute 987654321-123456789")
+
+ anyio.run(main)
+
+
+def test_can_convert_quickstart_example_to_agentspec() -> None:
+ from autogen_agentchat.agents import AssistantAgent
+ from autogen_ext.models.openai import OpenAIChatCompletionClient
+
+ from pyagentspec.adapters.autogen import AgentSpecExporter
+
+ async def add_tool(a: int, b: int) -> int:
+ """Adds a to b and returns the result"""
+ return a + b
+
+ autogen_tools = {"add_tool": add_tool}
+ model_client = OpenAIChatCompletionClient(model="gpt-4.1")
+ autogen_agent = AssistantAgent(
+ name="assistant",
+ model_client=model_client,
+ tools=list(autogen_tools.values()),
+ system_message="Use tools to solve tasks, and reformulate the answers that you get.",
+ reflect_on_tool_use=True,
+ )
+
+ _ = AgentSpecExporter().to_json(autogen_agent)
diff --git a/pyagentspec/tests/adapters/conftest.py b/pyagentspec/tests/adapters/conftest.py
index f28bd0bd..fd49fdbc 100644
--- a/pyagentspec/tests/adapters/conftest.py
+++ b/pyagentspec/tests/adapters/conftest.py
@@ -140,3 +140,35 @@ def skip_tests_if_dependency_not_installed(
# If the dependency is installed we run all the tests except the one that checks the import error
if item.name == f"test_import_raises_if_{module_name}_not_installed":
item.add_marker(pytest.mark.skip(reason=f"`{module_name}` is installed"))
+
+
+@pytest.fixture
+def quickstart_agent_json() -> str:
+ from pyagentspec.agent import Agent
+ from pyagentspec.llms.openaicompatibleconfig import OpenAiCompatibleConfig
+ from pyagentspec.property import FloatProperty
+ from pyagentspec.serialization import AgentSpecSerializer
+ from pyagentspec.tools import ServerTool
+
+ subtraction_tool = ServerTool(
+ name="subtraction-tool",
+ description="subtract two numbers together",
+ inputs=[FloatProperty(title="a"), FloatProperty(title="b")],
+ outputs=[FloatProperty(title="difference")],
+ )
+
+ agentspec_llm_config = OpenAiCompatibleConfig(
+ name="llama-3.3-70b-instruct",
+ model_id="/storage/models/Llama-3.3-70B-Instruct",
+ url=os.environ["LLAMA70BV33_API_URL"],
+ )
+
+ agent = Agent(
+ name="agentspec_tools_test",
+ description="agentspec_tools_test",
+ llm_config=agentspec_llm_config,
+ system_prompt="Perform subtraction with the given tool.",
+ tools=[subtraction_tool],
+ )
+
+ return AgentSpecSerializer().to_json(agent)
diff --git a/pyagentspec/tests/adapters/crewai/conftest.py b/pyagentspec/tests/adapters/crewai/conftest.py
index 697a7d4b..75d086a1 100644
--- a/pyagentspec/tests/adapters/crewai/conftest.py
+++ b/pyagentspec/tests/adapters/crewai/conftest.py
@@ -4,11 +4,14 @@
# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+import json
import os
from pathlib import Path
from typing import Any
+from urllib.parse import urljoin
import pytest
+import requests
from ..conftest import skip_tests_if_dependency_not_installed
@@ -32,3 +35,22 @@ def _disable_tracing():
finally:
if old_value is not None:
os.environ["CREWAI_DISABLE_TELEMETRY"] = old_value
+
+
+@pytest.fixture(autouse=True)
+def no_network_plusapi(monkeypatch):
+ try:
+ from crewai.cli.plus_api import PlusAPI
+
+ def fake_response(self, method: str, endpoint: str, **kwargs) -> requests.Response:
+ resp = requests.Response()
+ resp.status_code = 200
+ resp.url = urljoin(self.base_url, endpoint)
+ resp.headers["Content-Type"] = "application/json"
+ resp._content = json.dumps({"ok": True}).encode("utf-8")
+ resp.encoding = "utf-8"
+ return resp
+
+ monkeypatch.setattr(PlusAPI, "_make_request", fake_response, raising=True)
+ except ImportError:
+ pass
diff --git a/pyagentspec/tests/adapters/crewai/test_quickstart_example.py b/pyagentspec/tests/adapters/crewai/test_quickstart_example.py
new file mode 100644
index 00000000..6c3ba761
--- /dev/null
+++ b/pyagentspec/tests/adapters/crewai/test_quickstart_example.py
@@ -0,0 +1,72 @@
+# Copyright © 2025 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+import anyio
+
+from pyagentspec.agent import Agent
+
+
+def test_quickstart_example_runs(quickstart_agent_json: str):
+
+ from crewai import Crew, Task
+
+ from pyagentspec.adapters.crewai import AgentSpecLoader
+
+ def subtract(a: float, b: float) -> float:
+ return a - b
+
+ async def main():
+ loader = AgentSpecLoader(tool_registry={"subtraction-tool": subtract})
+ assistant = loader.load_json(quickstart_agent_json)
+
+ task = Task(
+ description="{user_input}",
+ expected_output="A helpful, concise reply to the user.",
+ agent=assistant,
+ async_execution=True,
+ )
+ crew = Crew(agents=[assistant], tasks=[task])
+ _ = await crew.kickoff_async(inputs={"user_input": "Compute 987654321-123456789"})
+
+ anyio.run(main)
+
+
+def test_can_convert_quickstart_example_to_agentspec() -> None:
+ from crewai import LLM, Agent
+ from crewai.tools.base_tool import Tool
+ from pydantic import BaseModel
+
+ from pyagentspec.adapters.crewai import AgentSpecExporter
+
+ class InputSchema(BaseModel):
+ a: float
+ b: float
+
+ def subtract(a: float, b: float) -> float:
+ """Subtract two numbers"""
+ return a - b
+
+ llm = LLM(
+ model="hosted_vllm/Llama-4-Maverick",
+ api_base="http://url.to.my.llama.model/v1",
+ max_tokens=512,
+ )
+
+ crewai_agent = Agent(
+ role="Calculator agent",
+ goal="Computes the mathematical operation prompted by the user",
+ backstory="You are a calculator with 20 years of experience",
+ llm=llm,
+ tools=[
+ Tool(
+ name="subtract",
+ description="Subtract two numbers",
+ args_schema=InputSchema,
+ func=subtract,
+ ),
+ ],
+ )
+ _ = AgentSpecExporter().to_json(crewai_agent)
diff --git a/pyagentspec/tests/adapters/langgraph/test_quickstart_example.py b/pyagentspec/tests/adapters/langgraph/test_quickstart_example.py
new file mode 100644
index 00000000..4da9abd8
--- /dev/null
+++ b/pyagentspec/tests/adapters/langgraph/test_quickstart_example.py
@@ -0,0 +1,85 @@
+# Copyright © 2025 Oracle and/or its affiliates.
+#
+# This software is under the Apache License 2.0
+# (LICENSE-APACHE or http://www.apache.org/licenses/LICENSE-2.0) or Universal Permissive License
+# (UPL) 1.0 (LICENSE-UPL or https://oss.oracle.com/licenses/upl), at your option.
+
+import anyio
+import pytest
+
+from pyagentspec.agent import Agent
+
+
+@pytest.mark.filterwarnings(
+    "ignore:`config_type` is deprecated and will be removed:DeprecationWarning"
+)
+def test_quickstart_example_runs(quickstart_agent_json: str):
+ from pyagentspec.adapters.langgraph import AgentSpecLoader
+
+ def subtract(a: float, b: float) -> float:
+ return a - b
+
+ async def main():
+ loader = AgentSpecLoader(tool_registry={"subtraction-tool": subtract})
+ assistant = loader.load_json(quickstart_agent_json)
+
+ _ = await assistant.ainvoke(
+ input={"messages": [{"role": "user", "content": "Compute 987654321-123456789"}]},
+ )
+
+ anyio.run(main)
+
+
+@pytest.mark.filterwarnings(
+    "ignore:`config_type` is deprecated and will be removed:DeprecationWarning"
+)
+def test_can_convert_quickstart_example_to_agentspec() -> None:
+ from langchain_openai.chat_models import ChatOpenAI
+ from langgraph.graph import END, START, StateGraph
+ from pydantic import SecretStr
+ from typing_extensions import Any, TypedDict
+
+ from pyagentspec.adapters.langgraph import AgentSpecExporter
+
+ class InputSchema(TypedDict):
+ city: str
+
+ class OutputSchema(TypedDict):
+ response: Any
+
+ class InternalState(TypedDict):
+ weather_data: str
+
+ def get_weather(state: InputSchema) -> InternalState:
+ """Returns the weather in a specific city.
+ Args
+ ----
+ city: The city to check the weather for
+
+ Returns
+ -------
+        weather_data: The weather in that city
+ """
+ return {"weather_data": f"The weather in {state['city']} is sunny."}
+
+ def llm_node(state: InternalState) -> OutputSchema:
+ model = ChatOpenAI(
+ base_url="your.url.to.llm/v1",
+ model="/storage/models/Llama-3.1-70B-Instruct",
+ api_key=SecretStr("t"),
+ )
+ result = model.invoke(
+ f"Reformulate the following sentence to the user: {state['weather_data']}"
+ )
+ return {"response": result.content}
+
+ graph = StateGraph(InternalState, input_schema=InputSchema, output_schema=OutputSchema)
+ graph.add_node("get_weather", get_weather)
+ graph.add_node("llm_node", llm_node)
+ graph.add_edge(START, "get_weather")
+ graph.add_edge("get_weather", "llm_node")
+ graph.add_edge("llm_node", END)
+ assistant_name = "Weather Flow"
+ langgraph_agent = graph.compile(name=assistant_name)
+
+ _ = AgentSpecExporter().to_json(langgraph_agent)