Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions src/providers/frameworks/index.ts
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@ import { LangGraphPyFrameworkProvider } from "./langgraph-py/index.js";
import { LangGraphTSFrameworkProvider } from "./langgraph-ts/index.js";
import { GoogleAdkFrameworkProvider } from "./google-adk/index.js";
import { VercelAIFrameworkProvider } from "./vercel-ai/index.js";
import { LettaPyFrameworkProvider } from "./letta-py/index.js";

export type MCPServerConfig =
| {
Expand Down Expand Up @@ -63,6 +64,7 @@ const PROVIDERS: Record<string, FrameworkProvider> = {
"langgraph-ts": LangGraphTSFrameworkProvider,
"google-adk": GoogleAdkFrameworkProvider,
"vercel-ai": VercelAIFrameworkProvider,
"letta-py": LettaPyFrameworkProvider,
};

/**
Expand Down
18 changes: 18 additions & 0 deletions src/providers/frameworks/letta-py/index.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
import type { FrameworkProvider } from "../index.js";
import { getKnowledge } from "./knowledge.js";
import { getMCPConfig } from "./mcp-config.js";

/**
 * Framework provider for Letta's Python SDK.
 *
 * Supplies knowledge-base content and an MCP documentation server entry
 * for building stateful, memory-backed Python agents.
 */
export const LettaPyFrameworkProvider: FrameworkProvider = {
  id: "letta-py",
  displayName: "Letta (Python)",
  language: "python",
  getKnowledge,
  getMCPConfig,
  // Documentation is delivered over MCP, so there are no files to scaffold.
  async setup() {},
};
24 changes: 24 additions & 0 deletions src/providers/frameworks/letta-py/knowledge.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
import type { FrameworkKnowledge } from "../index.js";
import type { ProjectConfig } from "../../../types.js";

/**
 * Builds the framework knowledge entry for the Letta (Python) provider.
 *
 * @param config - Project configuration (currently unused; kept so the
 *   signature matches the other framework providers' getKnowledge hooks).
 * @returns Setup instructions and knowledge-base text for the Letta SDK.
 */
export const getKnowledge = ({
  config: _config,
}: {
  config: ProjectConfig;
}): FrameworkKnowledge => ({
  setupInstructions: "pip install letta-client",

  // NOTE(review): CI reports "'knowledgeBase' does not exist in type
  // 'FrameworkKnowledge'" — confirm the correct field name against the
  // FrameworkKnowledge declaration in ../index.js before merging.
  knowledgeBase: `Always use the Letta Python SDK for stateful agents:

- Letta manages long-term memory and agent state
- Use memory blocks for self-editing memory
- Start with single agent, scale only when needed
- PostgreSQL for production, SQLite for development

Basic Usage:
from letta_client import Letta
client = Letta(api_key=os.getenv("LETTA_API_KEY"))
agent = client.agents.create()
response = client.agents.messages.create(agent_id=agent.id, messages=[...])
`,
});
13 changes: 13 additions & 0 deletions src/providers/frameworks/letta-py/mcp-config.ts
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
import type { MCPServerConfig } from "../index.js";

/**
 * Builds the MCP server entry that exposes Letta's hosted documentation.
 *
 * Launches the `mcp-remote` bridge via npx so the remote docs endpoint is
 * reachable over stdio.
 *
 * @returns MCP server configuration object
 */
export const getMCPConfig = (): MCPServerConfig => {
  const docsEndpoint = "https://docs.letta.com/mcp";
  return {
    type: "stdio",
    command: "npx",
    args: ["-y", "mcp-remote", docsEndpoint],
  };
};
15 changes: 15 additions & 0 deletions src/providers/frameworks/letta-py/templates/python/.env.example
Original file line number Diff line number Diff line change
@@ -0,0 +1,15 @@
# Letta SDK Configuration
# Copy this file to .env and fill in your actual values

# Your Letta API Key from https://app.letta.com/api-keys
LETTA_API_KEY=your-api-key-here

# Optional: Letta Server URL (if using self-hosted)
# LETTA_SERVER_URL=http://localhost:8080

# OpenAI API Key (required for chat models)
OPENAI_API_KEY=your-openai-api-key

# Optional: Custom model configuration
# LETTA_MODEL=openai/gpt-4
# LETTA_EMBEDDING_MODEL=openai/text-embedding-3-small
75 changes: 75 additions & 0 deletions src/providers/frameworks/letta-py/templates/python/agent.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,75 @@
"""Letta Python Agent Template

This template shows how to create a stateful Letta agent using the Python SDK.
Letta agents maintain persistent memory and learn from past conversations.
"""

import os
from letta_client import Letta


def create_agent():
    """
    Provision a new Letta agent configured with memory blocks and tools.

    Returns:
        agent_state: The created agent state object with agent ID
    """
    # API key comes from the environment (see .env.example).
    client = Letta(api_key=os.getenv("LETTA_API_KEY"))

    # Seed memory: who the user is, and how the agent describes itself.
    memory_blocks = [
        {
            "label": "human",
            "value": "User: Name and current status",
        },
        {
            "label": "persona",
            "value": "I am a helpful Letta agent built with Better Agents.",
        },
    ]

    # Register the agent with model/embedding choices and built-in tools.
    return client.agents.create(
        model="openai/gpt-4",
        embedding="openai/text-embedding-3-small",
        memory_blocks=memory_blocks,
        tools=["web_search", "run_code"],
    )


def message_agent(agent_id: str, user_message: str) -> dict:
    """
    Send a message to an agent and return its response.

    Args:
        agent_id: The ID of the agent to message
        user_message: The user's message

    Returns:
        dict: The response containing messages, tool calls, and reasoning steps
    """
    client = Letta(api_key=os.getenv("LETTA_API_KEY"))

    # The messages endpoint takes a list of role/content entries — the same
    # `messages=[...]` shape this project documents elsewhere — not a bare
    # `input` string. (Verify against the installed letta_client version.)
    response = client.agents.messages.create(
        agent_id=agent_id,
        messages=[{"role": "user", "content": user_message}],
    )

    return response


if __name__ == "__main__":
    # Example usage — requires LETTA_API_KEY in the environment and network
    # access to a Letta server.
    print("Creating Letta agent...")
    agent = create_agent()
    print(f"Agent created with ID: {agent.id}")

    # Send a test message (plain literal: no placeholders, so no f-prefix).
    print("\nMessaging agent...")
    response = message_agent(agent.id, "Hello! What can you do?")

    # Print response messages. Each message is assumed to be dict-like
    # (supports .get) — TODO confirm against the letta_client response type.
    for message in response.messages:
        print(f"{message.get('message_type', 'message')}: {message.get('content', message.get('reasoning', ''))}")
Original file line number Diff line number Diff line change
@@ -0,0 +1,3 @@
letta-client>=0.2.0
openai>=1.0.0
python-dotenv>=1.0.0
Loading