Skip to content
Merged
Show file tree
Hide file tree
Changes from 18 commits
Commits
Show all changes
20 commits
Select commit Hold shift + click to select a range
dccc37c
feat: add Google Gemini as a built-in model provider
claude Mar 14, 2026
11b4e95
feat: update Gemini models to latest and add Japanese/French i18n
claude Mar 15, 2026
8c328c1
Add google-genai as optional dependency group for Gemini provider
claude Mar 15, 2026
bf0be40
Make google-genai a core dependency instead of optional
claude Mar 15, 2026
a9fd162
Bump google-genai minimum version to >=1.67.0
claude Mar 15, 2026
8c0fc65
Fix formatting and lint issues for CI checks
claude Mar 15, 2026
c919e93
Remove French and Japanese locale additions to reduce PR scope
claude Mar 15, 2026
8f7ffae
Remove leftover FR locale import from console i18n
claude Mar 15, 2026
058c1b5
Revert unrelated console formatting changes to reduce PR scope
claude Mar 15, 2026
947708d
Add unit tests for GeminiProvider
claude Mar 15, 2026
60238fb
Reorder model normalization to strip prefix before display_name fallback
claude Mar 15, 2026
8f36a25
Narrow exception handling from generic Exception to genai APIError
claude Mar 15, 2026
94825f3
Update Gemini fallback model IDs to match current API names
claude Mar 15, 2026
cd58e66
Add GeminiChatModel to ChatModelName Literal type
claude Mar 15, 2026
c64be9d
Fix Gemini provider hanging on save: add missing await and timeout
claude Mar 15, 2026
236f7a9
Address PR review feedback: add generic Exception fallback and fix docs
ekzhu Mar 15, 2026
491fa6b
Fix pylint warnings in Gemini provider tests
ekzhu Mar 15, 2026
2bc4238
Fix formatting in providers router (trailing comma, black)
ekzhu Mar 15, 2026
c198b8c
Update gemini_provider.py
ekzhu Mar 16, 2026
0979019
Merge branch 'main' into claude/add-gemini-provider-3Gy4l
xieyxclack Mar 16, 2026
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ dependencies = [
"aiofiles>=24.1.0",
"paho-mqtt>=2.0.0",
"matrix-nio>=0.24.0",
"google-genai>=1.67.0",
]

[tool.setuptools.dynamic]
Expand Down
9 changes: 9 additions & 0 deletions src/copaw/agents/model_factory.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,13 @@
AnthropicChatFormatter = None
AnthropicChatModel = None

try:
from agentscope.formatter import GeminiChatFormatter
from agentscope.model import GeminiChatModel
except ImportError: # pragma: no cover - compatibility fallback
GeminiChatFormatter = None
GeminiChatModel = None

from .utils.tool_message_utils import _sanitize_tool_messages
from ..providers import ProviderManager
from ..providers.retry_chat_model import RetryChatModel
Expand Down Expand Up @@ -82,6 +89,8 @@ async def wrapper(
}
if AnthropicChatModel is not None and AnthropicChatFormatter is not None:
_CHAT_MODEL_FORMATTER_MAP[AnthropicChatModel] = AnthropicChatFormatter
if GeminiChatModel is not None and GeminiChatFormatter is not None:
_CHAT_MODEL_FORMATTER_MAP[GeminiChatModel] = GeminiChatFormatter


def _get_formatter_for_chat_model(
Expand Down
6 changes: 5 additions & 1 deletion src/copaw/app/routers/providers.py
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,11 @@

router = APIRouter(prefix="/models", tags=["models"])

ChatModelName = Literal["OpenAIChatModel", "AnthropicChatModel"]
# Names of AgentScope chat-model classes a provider config may declare.
# NOTE(review): appears to constrain the `chat_model` field accepted by
# this router — confirm against ProviderManager's dispatch logic.
ChatModelName = Literal[
    "OpenAIChatModel",
    "AnthropicChatModel",
    "GeminiChatModel",
]


def get_provider_manager(request: Request) -> ProviderManager:
Expand Down
130 changes: 130 additions & 0 deletions src/copaw/providers/gemini_provider.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,130 @@
# -*- coding: utf-8 -*-
"""A Google Gemini provider implementation using AgentScope's native
GeminiChatModel."""

from __future__ import annotations

from typing import Any, List

from agentscope.model import ChatModelBase
from google import genai
from google.genai import errors as genai_errors
from google.genai import types as genai_types

from copaw.providers.provider import ModelInfo, Provider


class GeminiProvider(Provider):
    """Provider implementation for the Google Gemini API.

    Uses the ``google-genai`` SDK for connectivity checks and model
    discovery, and AgentScope's native ``GeminiChatModel`` for chat.
    """

    def _client(self, timeout: float = 5) -> Any:
        """Build a genai client bound to this provider's API key.

        Args:
            timeout: Request timeout in seconds; converted to
                milliseconds for the SDK's ``HttpOptions``.
        """
        return genai.Client(
            api_key=self.api_key,
            http_options=genai_types.HttpOptions(timeout=int(timeout * 1000)),
        )

    @staticmethod
    def _normalize_models_payload(payload: Any) -> List[ModelInfo]:
        """Convert raw SDK model rows into de-duplicated ``ModelInfo``s.

        Rows with an empty name are skipped. Duplicate IDs keep only
        the first occurrence, preserving insertion order.
        """
        normalized: List[ModelInfo] = []
        seen: set[str] = set()
        for row in payload or []:
            model_id = str(getattr(row, "name", "") or "").strip()
            if not model_id:
                continue

            # Gemini API returns model names like "models/gemini-2.5-flash";
            # strip the "models/" prefix so IDs match what callers pass back.
            if model_id.startswith("models/"):
                model_id = model_id[len("models/") :]

            display_name = str(
                getattr(row, "display_name", "") or model_id,
            ).strip()
            # Fall back to the ID when display_name is missing or still
            # carries the raw "models/" prefix.
            if not display_name or display_name.startswith("models/"):
                display_name = model_id

            # Single-pass de-duplication: first occurrence wins.
            if model_id in seen:
                continue
            seen.add(model_id)
            normalized.append(ModelInfo(id=model_id, name=display_name))
        return normalized

    async def check_connection(self, timeout: float = 5) -> tuple[bool, str]:
        """Check if the Google Gemini provider is reachable.

        Returns:
            ``(True, "")`` on success, otherwise ``(False, message)``.
        """
        try:
            client = self._client(timeout=timeout)
            # Use the async list-models endpoint to verify connectivity;
            # one item is enough, so stop after the first.
            async for _ in await client.aio.models.list():
                break
            return True, ""
        except genai_errors.APIError:
            return (
                False,
                "Failed to connect to Google Gemini API. "
                "Check your API key.",
            )
        except Exception:
            # Non-API failures (network, SDK internals) still surface
            # as a connection failure rather than crashing the caller.
            return (
                False,
                "Unknown exception when connecting to Google Gemini API.",
            )

    async def fetch_models(self, timeout: float = 5) -> List[ModelInfo]:
        """Fetch available models from the Gemini API.

        Returns:
            The discovered models, or an empty list on any failure —
            discovery is best-effort and callers fall back to the
            built-in model list.
        """
        try:
            client = self._client(timeout=timeout)
            payload = []
            async for model in await client.aio.models.list():
                payload.append(model)
            return self._normalize_models_payload(payload)
        except Exception:
            # API and non-API failures alike map to "no models found";
            # the previous separate APIError branch returned the same
            # value, so a single handler suffices.
            return []

    async def check_model_connection(
        self,
        model_id: str,
        timeout: float = 5,
    ) -> tuple[bool, str]:
        """Check if a specific Gemini model is reachable/usable.

        Sends a minimal streaming "ping" request and stops after the
        first chunk arrives.

        Returns:
            ``(True, "")`` on success, otherwise ``(False, message)``.
        """
        target = (model_id or "").strip()
        if not target:
            return False, "Empty model ID"

        try:
            client = self._client(timeout=timeout)
            response = await client.aio.models.generate_content_stream(
                model=target,
                contents="ping",
            )
            async for _ in response:
                break
            return True, ""
        except genai_errors.APIError:
            return (
                False,
                f"Model '{model_id}' is not reachable or usable",
            )
        except Exception:
            return (
                False,
                f"Unknown exception when connecting to model '{model_id}'",
            )

    def get_chat_model_instance(self, model_id: str) -> ChatModelBase:
        """Create an AgentScope ``GeminiChatModel`` for *model_id*.

        The import is deferred so the provider module can be loaded
        even when AgentScope's Gemini extra is absent.
        """
        from agentscope.model import GeminiChatModel

        return GeminiChatModel(
            model_name=model_id,
            stream=True,
            api_key=self.api_key,
            generate_kwargs=self.generate_kwargs,
        )
28 changes: 28 additions & 0 deletions src/copaw/providers/provider_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
)
from copaw.providers.openai_provider import OpenAIProvider
from copaw.providers.anthropic_provider import AnthropicProvider
from copaw.providers.gemini_provider import GeminiProvider
from copaw.providers.ollama_provider import OllamaProvider
from copaw.constant import SECRET_DIR
from copaw.local_models import create_local_chat_model
Expand Down Expand Up @@ -92,6 +93,19 @@

ANTHROPIC_MODELS: List[ModelInfo] = []

# Built-in fallback list of Gemini model IDs, used when live model
# discovery via the API is unavailable. IDs match the current API names
# (no "models/" prefix).
GEMINI_MODELS: List[ModelInfo] = [
    ModelInfo(id="gemini-3.1-pro-preview", name="Gemini 3.1 Pro Preview"),
    ModelInfo(id="gemini-3-flash-preview", name="Gemini 3 Flash Preview"),
    ModelInfo(
        id="gemini-3.1-flash-lite-preview",
        name="Gemini 3.1 Flash Lite Preview",
    ),
    ModelInfo(id="gemini-2.5-pro", name="Gemini 2.5 Pro"),
    ModelInfo(id="gemini-2.5-flash", name="Gemini 2.5 Flash"),
    ModelInfo(id="gemini-2.5-flash-lite", name="Gemini 2.5 Flash Lite"),
    ModelInfo(id="gemini-2.0-flash", name="Gemini 2.0 Flash"),
]

PROVIDER_MODELSCOPE = OpenAIProvider(
id="modelscope",
name="ModelScope",
Expand Down Expand Up @@ -169,6 +183,17 @@
freeze_url=True,
)

# Built-in Google Gemini provider. The base URL is frozen because the
# google-genai SDK always targets the official endpoint; live model
# discovery is enabled (support_model_discovery=True).
PROVIDER_GEMINI = GeminiProvider(
    id="gemini",
    name="Google Gemini",
    base_url="https://generativelanguage.googleapis.com",
    api_key_prefix="",
    models=GEMINI_MODELS,
    chat_model="GeminiChatModel",
    freeze_url=True,
    support_model_discovery=True,
)

PROVIDER_OLLAMA = OllamaProvider(
id="ollama",
name="Ollama",
Expand Down Expand Up @@ -244,6 +269,7 @@ def _init_builtins(self):
self._add_builtin(PROVIDER_AZURE_OPENAI)
self._add_builtin(PROVIDER_MINIMAX)
self._add_builtin(PROVIDER_ANTHROPIC)
self._add_builtin(PROVIDER_GEMINI)
self._add_builtin(PROVIDER_OLLAMA)
self._add_builtin(PROVIDER_LMSTUDIO)
self._add_builtin(PROVIDER_LLAMACPP)
Expand Down Expand Up @@ -454,6 +480,8 @@ def _provider_from_data(self, data: Dict) -> Provider:

if provider_id == "anthropic" or chat_model == "AnthropicChatModel":
return AnthropicProvider.model_validate(data)
if provider_id == "gemini" or chat_model == "GeminiChatModel":
return GeminiProvider.model_validate(data)
if provider_id == "ollama":
return OllamaProvider.model_validate(data)
if data.get("is_local", False):
Expand Down
Loading
Loading