diff --git a/.github/workflows/update.yml b/.github/workflows/update.yml index 6de8341c..fb4607bd 100644 --- a/.github/workflows/update.yml +++ b/.github/workflows/update.yml @@ -17,9 +17,21 @@ jobs: - uses: actions/setup-go@v6 with: go-version-file: go.mod - - run: go run ./cmd/openrouter/main.go + - name: Restore APIpie cache + uses: actions/cache@v4 + with: + path: cmd/apipie/cache.db + key: apipie-cache-${{ hashFiles('cmd/apipie/cache.go') }} + restore-keys: | + apipie-cache- + - name: Generate OpenRouter models + run: go run ./cmd/openrouter/main.go # we need to add this back when we know that the providers/models all work # - run: go run ./cmd/huggingface/main.go + - name: Generate APIpie models + env: + APIPIE_DISPLAY_NAME_API_KEY: ${{ secrets.APIPIE_DISPLAY_NAME_API_KEY }} + run: go run ./cmd/apipie/main.go ./cmd/apipie/cache.go - uses: stefanzweifel/git-auto-commit-action@28e16e81777b558cc906c8750092100bbb34c5e3 # v5 with: commit_message: "chore: auto-update generated files" diff --git a/.gitignore b/.gitignore index 11bbf384..98afa26d 100644 --- a/.gitignore +++ b/.gitignore @@ -34,3 +34,6 @@ go.work.sum # crush .crush dist/ + +# apipie model name cache +cmd/apipie/cache.db diff --git a/Taskfile.yaml b/Taskfile.yaml index 580d4329..8a34b9fb 100644 --- a/Taskfile.yaml +++ b/Taskfile.yaml @@ -7,6 +7,12 @@ tasks: cmds: - go run cmd/openrouter/main.go + generate:apipie: + desc: Generate APIpie models + aliases: [gen:apipie] + cmds: + - go run cmd/apipie/main.go + lint: desc: Run linters cmds: diff --git a/cmd/apipie/README.md b/cmd/apipie/README.md new file mode 100644 index 00000000..02e3dafe --- /dev/null +++ b/cmd/apipie/README.md @@ -0,0 +1,36 @@ +# APIpie Model Configuration Generator + +This tool fetches models from APIpie.ai and generates a configuration file for the provider. + +## LLM-Enhanced Display Names + +This tool includes an optional feature to generate professional display names for AI models using APIpie.ai's LLM service. This feature is **sponsored** to improve the user experience of this open source project. 
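+
+Each model entry in the generated `internal/providers/configs/apipie.json` carries the display name alongside pricing and capability metadata. For example (values shown are illustrative, taken from a generated config):
+
+```json
+{
+  "id": "claude-3-5-haiku-20241022",
+  "name": "Claude 3.5 Haiku (Anthropic)",
+  "cost_per_1m_in": 0.80290698,
+  "cost_per_1m_out": 3.99981655,
+  "cost_per_1m_in_cached": 0,
+  "cost_per_1m_out_cached": 0,
+  "context_window": 200000,
+  "default_max_tokens": 8192,
+  "can_reason": false,
+  "has_reasoning_efforts": false,
+  "supports_attachments": true
+}
+```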
+ +### Configuration + +Set the following environment variable: + +```bash +# Required for LLM-enhanced display names (donated API key) +export APIPIE_DISPLAY_NAME_API_KEY="your-apipie-api-key" +``` + +### Behavior + +- **With API key**: Uses Claude Sonnet 4.5 via APIpie.ai to generate professional display names + - Example: `gpt-4o-2024-11-20` → `"GPT-4o (2024-11-20)"` + - Example: `claude-3-5-sonnet` → `"Claude 3.5 Sonnet"` + +- **Without API key or on failure**: Falls back to using the raw model ID as display name + - Example: `gpt-4o-2024-11-20` → `"gpt-4o-2024-11-20"` + - This ensures the tool **never breaks** due to API issues + +### Usage + +```bash +# Generate configuration with LLM-enhanced names +go run cmd/apipie/main.go + +# The generated config will be saved to: +# internal/providers/configs/apipie.json +``` \ No newline at end of file diff --git a/cmd/apipie/cache.go b/cmd/apipie/cache.go new file mode 100644 index 00000000..cd61faac --- /dev/null +++ b/cmd/apipie/cache.go @@ -0,0 +1,197 @@ +package main + +import ( + "crypto/sha256" + "database/sql" + "fmt" + "log" + "time" + + _ "modernc.org/sqlite" +) + +// CacheEntry represents a cached display name for a model +type CacheEntry struct { + ModelID string + DescriptionHash string + DisplayName string + CreatedAt time.Time +} + +// Cache manages the SQLite database for caching LLM-generated display names +type Cache struct { + db *sql.DB +} + +// NewCache creates a new cache instance and initializes the database +func NewCache(dbPath string) (*Cache, error) { + db, err := sql.Open("sqlite", dbPath) + if err != nil { + return nil, fmt.Errorf("failed to open database: %w", err) + } + + cache := &Cache{db: db} + if err := cache.initSchema(); err != nil { + db.Close() + return nil, fmt.Errorf("failed to initialize schema: %w", err) + } + + return cache, nil +} + +// Close closes the database connection +func (c *Cache) Close() error { + return c.db.Close() +} + +// initSchema creates the cache table if it doesn't exist +func (c *Cache) initSchema() error { + query := ` + CREATE TABLE IF NOT EXISTS display_name_cache ( + model_id TEXT NOT NULL, + description_hash TEXT NOT NULL, + display_name TEXT NOT NULL, + created_at DATETIME NOT NULL, + PRIMARY KEY (model_id, description_hash) + ); + + CREATE INDEX IF NOT EXISTS idx_model_id ON display_name_cache(model_id); + CREATE INDEX IF NOT EXISTS idx_created_at ON display_name_cache(created_at); + + CREATE TABLE IF NOT EXISTS reasoning_effort_cache ( + description_hash TEXT NOT NULL PRIMARY KEY, + has_reasoning_effort BOOLEAN NOT NULL, + created_at DATETIME NOT NULL + ); + + CREATE INDEX IF NOT EXISTS idx_reasoning_created_at ON reasoning_effort_cache(created_at); + ` + + _, err := c.db.Exec(query) + return err +} + + + +// hashDescription creates a SHA256 hash of the model description (legacy function) +// This allows us to detect when descriptions change and invalidate cache +func hashDescription(description string) string { + hash := sha256.Sum256([]byte(description)) + return fmt.Sprintf("%x", hash) +} + +// Get retrieves a cached display name for a model +// Returns empty string if not found or metadata has changed +func (c *Cache) Get(model Model) string { + metadataHash := hashModelMetadata(model) + + var displayName string + query := `SELECT display_name FROM display_name_cache + WHERE model_id = ? 
AND description_hash = ?` + + err := c.db.QueryRow(query, model.ID, metadataHash).Scan(&displayName) + if err != nil { + if err != sql.ErrNoRows { + log.Printf("Cache get error for model %s: %v", model.ID, err) + } + return "" + } + + return displayName +} + +// Set stores a display name in the cache +func (c *Cache) Set(model Model, displayName string) error { + metadataHash := hashModelMetadata(model) + + query := `INSERT OR REPLACE INTO display_name_cache + (model_id, description_hash, display_name, created_at) + VALUES (?, ?, ?, ?)` + + _, err := c.db.Exec(query, model.ID, metadataHash, displayName, time.Now()) + if err != nil { + return fmt.Errorf("failed to cache display name for model %s: %w", model.ID, err) + } + + return nil +} + +// GetStats returns cache statistics +func (c *Cache) GetStats() (int, error) { + var count int + err := c.db.QueryRow("SELECT COUNT(*) FROM display_name_cache").Scan(&count) + return count, err +} + +// CleanOldEntries removes cache entries older than the specified duration +// This helps keep the cache size manageable +func (c *Cache) CleanOldEntries(maxAge time.Duration) error { + cutoff := time.Now().Add(-maxAge) + + // Clean display name cache + query := `DELETE FROM display_name_cache WHERE created_at < ?` + result, err := c.db.Exec(query, cutoff) + if err != nil { + return fmt.Errorf("failed to clean old display name entries: %w", err) + } + + rowsAffected, _ := result.RowsAffected() + if rowsAffected > 0 { + log.Printf("Cleaned %d old display name cache entries", rowsAffected) + } + + // Clean reasoning effort cache + query = `DELETE FROM reasoning_effort_cache WHERE created_at < ?` + result, err = c.db.Exec(query, cutoff) + if err != nil { + return fmt.Errorf("failed to clean old reasoning effort entries: %w", err) + } + + rowsAffected, _ = result.RowsAffected() + if rowsAffected > 0 { + log.Printf("Cleaned %d old reasoning effort cache entries", rowsAffected) + } + + return nil +} + +// GetReasoningEffort retrieves cached reasoning effort analysis for a description +func (c *Cache) GetReasoningEffort(description string) (bool, bool) { + if description == "" { + return false, false + } + + hash := hashDescription(description) + + var hasEffort bool + err := c.db.QueryRow( + "SELECT has_reasoning_effort FROM reasoning_effort_cache WHERE description_hash = ?", + hash, + ).Scan(&hasEffort) + + if err != nil { + return false, false // Cache miss + } + + return hasEffort, true // Cache hit +} + +// SetReasoningEffort stores reasoning effort analysis result in cache +func (c *Cache) SetReasoningEffort(description string, hasEffort bool) error { + if description == "" { + return nil + } + + hash := hashDescription(description) + + _, err := c.db.Exec( + "INSERT OR REPLACE INTO reasoning_effort_cache (description_hash, has_reasoning_effort, created_at) VALUES (?, ?, ?)", + hash, hasEffort, time.Now(), + ) + + if err != nil { + return fmt.Errorf("failed to cache reasoning effort: %w", err) + } + + return nil +} \ No newline at end of file diff --git a/cmd/apipie/main.go b/cmd/apipie/main.go new file mode 100644 index 00000000..4281a88a --- /dev/null +++ b/cmd/apipie/main.go @@ -0,0 +1,731 @@ +// Package main provides a command-line tool to fetch models from APIpie +// and generate a configuration file for the provider. +// +// LLM-Enhanced Display Names: +// This tool uses APIpie.ai's LLM service to generate professional display names +// for models based on their IDs and descriptions. 
The API key is donated to +// improve the user experience of this open source project. +// +// API Key Configuration: +// Set APIPIE_DISPLAY_NAME_API_KEY environment variable to enable LLM-generated +// display names. This should be set in GitHub Actions secrets. +// +// Fallback Behavior: +// If the APIpie API key is not working or not provided, the tool will fall back +// to using the raw model ID as the display name. This ensures the tool never +// breaks due to API issues. +// +// Example usage: +// +// export APIPIE_DISPLAY_NAME_API_KEY="your-apipie-api-key" +// go run cmd/apipie/main.go +package main + +import ( + "bytes" + "context" + "crypto/sha256" + "encoding/json" + "fmt" + "io" + "log" + "net/http" + "os" + "slices" + "strconv" + "strings" + "time" + + "github.com/charmbracelet/catwalk/pkg/catwalk" +) + +// retryableHTTPRequest performs an HTTP request with exponential backoff retry for 502 errors +func retryableHTTPRequest(req *http.Request, operation string) (*http.Response, error) { + maxRetries := 3 + baseDelay := 1 * time.Second + + for attempt := 0; attempt < maxRetries; attempt++ { + client := &http.Client{Timeout: 30 * time.Second} + + resp, err := client.Do(req) + if err != nil { + if attempt == maxRetries-1 { + return nil, fmt.Errorf("%s failed after %d retries: %w", operation, maxRetries, err) + } + delay := baseDelay * time.Duration(1< 0 { + if model.MaxTokens >= 1000000 { + contextInfo = fmt.Sprintf(" (%dM tokens)", model.MaxTokens/1000000) + } else if model.MaxTokens >= 1000 { + contextInfo = fmt.Sprintf(" (%dK tokens)", model.MaxTokens/1000) + } else { + contextInfo = fmt.Sprintf(" (%d tokens)", model.MaxTokens) + } + } + + prompt += fmt.Sprintf(`[%d] Model ID: "%s" + Base Model: "%s" + Provider: "%s" + Route: "%s" + Pool: "%s" + Subtype: "%s" + Input Modalities: %s + Output Modalities: %s + Context Window: %s + Description: "%s" + +`, i+1, model.ID, model.Model, model.Provider, model.Route, model.Pool, model.Subtype, + inputMods, outputMods, strings.TrimSpace(contextInfo), strings.Split(model.Description, "\n")[0]) + } + + prompt += `NAMING RULES: +1. If one model has provider="pool", give it the simple canonical name (this is the meta-model) +2. For provider-specific variants, add provider name: "GPT-4 (OpenAI)", "GPT-4 (Azure)" +3. For multimodal variants, highlight capabilities: "GPT-4 Vision", "Claude 3.5 Sonnet (Vision)", "Gemini Pro (Audio)" +4. For context window differences, include size when significant: "Claude 3.5 Sonnet (200K)", "GPT-4 Turbo (128K)" +5. For feature variants, highlight differences: "GPT-4 Turbo", "Llama 3.1 Instruct", "Mistral 7B (Quantized)" +6. Keep names under 50 characters +7. Use proper capitalization and formatting +8. Make differences clear and concise +9. 
Prioritize: modalities > provider > context size > other features + +Generate names in this exact format (one per line): +[1] -> Display Name Here +[2] -> Display Name Here +etc.` + + reqBody := APIpieRequest{ + Messages: []APIpieMessage{ + { + Role: "user", + Content: prompt, + }, + }, + Model: "claude-sonnet-4-5", + MaxTokens: 300, + Temperature: 0.1, // Low temperature for consistent results + } + + jsonData, err := json.Marshal(reqBody) + if err != nil { + log.Printf("Failed to marshal APIpie request for group display name generation: %v", err) + return nil + } + + req, err := http.NewRequestWithContext( + context.Background(), + "POST", + "https://apipie.ai/v1/chat/completions", + bytes.NewBuffer(jsonData), + ) + if err != nil { + log.Printf("Failed to create APIpie request for group display name generation: %v", err) + return nil + } + + req.Header.Set("x-api-key", apiKey) + req.Header.Set("Content-Type", "application/json") + + resp, err := retryableHTTPRequest(req, "Group display name generation") + if err != nil { + log.Printf("APIpie API failed for group display name generation: %v", err) + return nil + } + defer resp.Body.Close() + + if resp.StatusCode != 200 { + body, _ := io.ReadAll(resp.Body) + log.Printf("APIpie API returned status %d for group display name generation: %s", resp.StatusCode, string(body)) + return nil + } + + var apipieResp APIpieResponse + if err := json.NewDecoder(resp.Body).Decode(&apipieResp); err != nil { + log.Printf("Failed to decode APIpie response for group display name generation: %v", err) + return nil + } + + if len(apipieResp.Choices) == 0 { + log.Printf("APIpie returned empty choices for group display name generation") + return nil + } + + // Parse the response to extract names + response := strings.TrimSpace(apipieResp.Choices[0].Message.Content) + return parseGroupNamesResponse(response, models) +} + +// parseGroupNamesResponse parses the LLM response and maps names to models +func parseGroupNamesResponse(response string, models []Model) map[string]string { + lines := strings.Split(response, "\n") + result := make(map[string]string) + + for _, line := range lines { + line = strings.TrimSpace(line) + if strings.Contains(line, "] ->") { + // Parse format: "[1] -> Display Name" + parts := strings.SplitN(line, "] ->", 2) + if len(parts) == 2 { + indexStr := strings.TrimPrefix(strings.TrimSpace(parts[0]), "[") + name := strings.TrimSpace(parts[1]) + + // Convert to 0-based index + if idx := parseIndex(indexStr); idx >= 0 && idx < len(models) { + model := models[idx] + key := getModelCacheKey(model) + if len(name) > 0 && len(name) <= 60 && !strings.Contains(name, "\n") { + result[key] = name + } + } + } + } + } + + return result +} + +// parseIndex converts string index to int, returns -1 if invalid +func parseIndex(s string) int { + if idx, err := strconv.Atoi(s); err == nil && idx > 0 { + return idx - 1 // Convert to 0-based + } + return -1 +} + +// createDisplayName generates a display name for a model using cache-first approach. +// This is used for individual models that don't have duplicates. 
+func createDisplayName(cache *Cache, model Model) string { + // Use the same prompt as group processing (for consistency) + result := createDisplayNamesForGroup(cache, []Model{model}) + key := getModelCacheKey(model) + if name, exists := result[key]; exists { + return name + } + // Fallback to model ID if something went wrong + return model.ID +} + +// createDisplayNamesForGroup generates display names for a group of models with the same ID +func createDisplayNamesForGroup(cache *Cache, models []Model) map[string]string { + result := make(map[string]string) + uncachedModels := []Model{} + + // Check cache for each model in the group + for _, model := range models { + if cachedName := cache.Get(model); cachedName != "" { + key := getModelCacheKey(model) + result[key] = cachedName + } else { + uncachedModels = append(uncachedModels, model) + } + } + + // If all models are cached, return cached results + if len(uncachedModels) == 0 { + return result + } + + // Generate names for uncached models as a group + if groupNames := generateDisplayNamesForGroup(uncachedModels); groupNames != nil { + // Cache successful results + for key, name := range groupNames { + result[key] = name + + // Find the model for this key to cache it + for _, model := range uncachedModels { + modelKey := getModelCacheKey(model) + if modelKey == key { + if err := cache.Set(model, name); err != nil { + log.Printf("Failed to cache group display name for %s: %v", model.ID, err) + } else { + log.Printf("Cached group LLM-generated name for %s: %s", model.ID, name) + } + break + } + } + } + } + + // For any remaining uncached models, use fallback + for _, model := range uncachedModels { + key := getModelCacheKey(model) + if _, exists := result[key]; !exists { + result[key] = model.ID // Fallback to model ID + } + } + + return result +} + +// getModelCacheKey generates a unique cache key for a model including all metadata +func getModelCacheKey(model Model) string { + return model.ID + "|" + hashModelMetadata(model) +} + +// hashModelMetadata creates a SHA256 hash of all differentiating model metadata +// This ensures models with same ID but different providers/routes/descriptions get separate cache entries +func hashModelMetadata(model Model) string { + // Include all metadata that could differentiate models with the same ID + metadata := fmt.Sprintf("%s|%s|%s|%s|%s|%s|%s|%s|%s|%s|%d", + model.Description, + model.Provider, + model.Route, + model.Pool, + model.Subtype, + model.InstructType, + model.Quantization, + model.Model, // Base model name + strings.Join(model.InputModalities, ","), // Input modalities (text, image, audio, etc.) + strings.Join(model.OutputModalities, ","), // Output modalities + model.MaxTokens, // Context window size + ) + hash := sha256.Sum256([]byte(metadata)) + return fmt.Sprintf("%x", hash) +} + +func getDefaultMaxTokens(model Model) int64 { + if model.MaxResponseTokens > 0 { + return model.MaxResponseTokens + } + if model.MaxTokens > 0 { + return model.MaxTokens / 4 // Conservative default + } + return 4096 // reasonable default +} + +func getContextWindow(model Model) int64 { + if model.MaxTokens > 0 { + return model.MaxTokens + } + return 32768 +} + +// This is used to generate the apipie.json config file. 
+func main() { + // Initialize cache + cache, err := NewCache("cmd/apipie/cache.db") + if err != nil { + log.Fatal("Error initializing cache:", err) + } + defer cache.Close() + + // Clean old cache entries (older than 30 days) + if err := cache.CleanOldEntries(30 * 24 * time.Hour); err != nil { + log.Printf("Warning: Failed to clean old cache entries: %v", err) + } + + // Get cache stats + if cacheCount, err := cache.GetStats(); err == nil { + log.Printf("Cache initialized with %d entries", cacheCount) + } + + modelsResp, err := fetchAPIpieModels() + if err != nil { + log.Fatal("Error fetching APIpie models:", err) + } + + apipieProvider := catwalk.Provider{ + Name: "APIpie", + ID: "apipie", + APIKey: "$APIPIE_API_KEY", + APIEndpoint: "https://apipie.ai/v1", + Type: catwalk.TypeOpenAI, + DefaultLargeModelID: "claude-sonnet-4-5", + DefaultSmallModelID: "claude-haiku-4-5", + Models: []catwalk.Model{}, + } + + // Group models by ID to handle duplicates intelligently + modelGroups := make(map[string][]Model) + for _, model := range modelsResp.Data { + if isTextModel(model) { + modelGroups[model.ID] = append(modelGroups[model.ID], model) + } + } + + // Process each group + for modelID, models := range modelGroups { + var displayNames map[string]string + + if len(models) == 1 { + // Single model - use individual processing + model := models[0] + displayName := createDisplayName(cache, model) + key := getModelCacheKey(model) + displayNames = map[string]string{key: displayName} + } else { + // Multiple models with same ID - use group processing + log.Printf("Processing %d variants of model %s", len(models), modelID) + displayNames = createDisplayNamesForGroup(cache, models) + } + + // Create catwalk.Model entries for each model + for _, model := range models { + key := getModelCacheKey(model) + displayName, exists := displayNames[key] + if !exists { + displayName = model.ID // Fallback + } + + // Parse and convert costs to per-million-tokens + var costPer1MIn, costPer1MOut float64 + + // Confirmed pricing is already per-million-tokens, advertised is per-token + if model.Pricing.Confirmed.InputCost != "" { + costPer1MIn, _ = strconv.ParseFloat(model.Pricing.Confirmed.InputCost, 64) + } else if model.Pricing.Advertised.InputCostPerToken != "" { + inputCostPerToken, _ := strconv.ParseFloat(model.Pricing.Advertised.InputCostPerToken, 64) + costPer1MIn = inputCostPerToken * 1_000_000 + } + + if model.Pricing.Confirmed.OutputCost != "" { + costPer1MOut, _ = strconv.ParseFloat(model.Pricing.Confirmed.OutputCost, 64) + } else if model.Pricing.Advertised.OutputCostPerToken != "" { + outputCostPerToken, _ := strconv.ParseFloat(model.Pricing.Advertised.OutputCostPerToken, 64) + costPer1MOut = outputCostPerToken * 1_000_000 + } + + m := catwalk.Model{ + ID: model.ID, + Name: displayName, + CostPer1MIn: costPer1MIn, + CostPer1MOut: costPer1MOut, + CostPer1MInCached: 0, + CostPer1MOutCached: 0, + ContextWindow: model.MaxTokens, + DefaultMaxTokens: getDefaultMaxTokens(model), + CanReason: canReason(model), + HasReasoningEffort: hasReasoningEfforts(cache, model), + SupportsImages: supportsImages(model), + } + + apipieProvider.Models = append(apipieProvider.Models, m) + fmt.Printf("Added model %s (%s) with context window %d\n", model.ID, displayName, m.ContextWindow) + } + } + + // Sort models by name for consistency + slices.SortFunc(apipieProvider.Models, func(a catwalk.Model, b catwalk.Model) int { + return strings.Compare(a.Name, b.Name) + }) + + // Save the JSON in internal/providers/configs/apipie.json + 
data, err := json.MarshalIndent(apipieProvider, "", " ") + if err != nil { + log.Fatal("Error marshaling APIpie provider:", err) + } + + // Write to file + if err := os.WriteFile("internal/providers/configs/apipie.json", data, 0o600); err != nil { + log.Fatal("Error writing APIpie provider config:", err) + } + + // Final cache stats + if finalCount, err := cache.GetStats(); err == nil { + log.Printf("Cache now contains %d entries", finalCount) + } + + fmt.Printf("Successfully generated APIpie provider config with %d models\n", len(apipieProvider.Models)) +} diff --git a/go.mod b/go.mod index 6dafe8f7..c7b29b6d 100644 --- a/go.mod +++ b/go.mod @@ -7,12 +7,22 @@ require github.com/prometheus/client_golang v1.23.2 require ( github.com/beorn7/perks v1.0.1 // indirect github.com/cespare/xxhash/v2 v2.3.0 // indirect + github.com/dustin/go-humanize v1.0.1 // indirect + github.com/google/uuid v1.6.0 // indirect github.com/kr/text v0.2.0 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 // indirect + github.com/ncruces/go-strftime v0.1.9 // indirect github.com/prometheus/client_model v0.6.2 // indirect github.com/prometheus/common v0.66.1 // indirect github.com/prometheus/procfs v0.16.1 // indirect + github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect go.yaml.in/yaml/v2 v2.4.2 // indirect + golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b // indirect golang.org/x/sys v0.35.0 // indirect google.golang.org/protobuf v1.36.8 // indirect + modernc.org/libc v1.66.3 // indirect + modernc.org/mathutil v1.7.1 // indirect + modernc.org/memory v1.11.0 // indirect + modernc.org/sqlite v1.39.0 // indirect ) diff --git a/go.sum b/go.sum index d6b8ca98..6c5b1f25 100644 --- a/go.sum +++ b/go.sum @@ -5,8 +5,12 @@ github.com/cespare/xxhash/v2 v2.3.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XL github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/dustin/go-humanize v1.0.1 h1:GzkhY7T5VNhEkwH0PVJgjz+fX1rhBrR7pRT3mDkpeCY= +github.com/dustin/go-humanize v1.0.1/go.mod h1:Mu1zIs6XwVuF/gI1OepvI0qD18qycQx+mFykh5fBlto= github.com/google/go-cmp v0.7.0 h1:wk8382ETsv4JYUZwIsn6YpYiWiBsYLSJiTsyBybVuN8= github.com/google/go-cmp v0.7.0/go.mod h1:pXiqmnSA92OHEEa9HXL2W4E7lf9JzCmGVUdgjX3N/iU= +github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= +github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE= @@ -15,8 +19,12 @@ github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 
h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA= github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ= +github.com/ncruces/go-strftime v0.1.9 h1:bY0MQC28UADQmHmaF5dgpLmImcShSi2kHU9XLdhx/f4= +github.com/ncruces/go-strftime v0.1.9/go.mod h1:Fwc5htZGVVkseilnfgOVb9mKy6w1naJmn9CehxcKcls= github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= github.com/prometheus/client_golang v1.23.2 h1:Je96obch5RDVy3FDMndoUsjAhG5Edi49h0RJWRi/o0o= @@ -27,6 +35,8 @@ github.com/prometheus/common v0.66.1 h1:h5E0h5/Y8niHc5DlaLlWLArTQI7tMrsfQjHV+d9Z github.com/prometheus/common v0.66.1/go.mod h1:gcaUsgf3KfRSwHY4dIMXLPV0K/Wg1oZ8+SbZk/HH/dA= github.com/prometheus/procfs v0.16.1 h1:hZ15bTNuirocR6u0JZ6BAHHmwS1p8B4P6MRqxtzMyRg= github.com/prometheus/procfs v0.16.1/go.mod h1:teAbpZRB1iIAJYREa1LsoWUXykVXA1KlTmWl8x/U+Is= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec h1:W09IVJc94icq4NjY3clb7Lk8O1qJ8BdBEF8z0ibU0rE= +github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo= github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ= github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog= github.com/stretchr/testify v1.11.1 h1:7s2iGBzp5EwR7/aIZr8ao5+dra3wiQyKjjFuvgVKu7U= @@ -35,6 +45,9 @@ go.uber.org/goleak v1.3.0 h1:2K3zAYmnTNqV73imy9J1T3WC+gmCePx2hEGkimedGto= go.uber.org/goleak v1.3.0/go.mod h1:CoHD4mav9JJNrW/WLlf7HGZPjdw8EucARQHekz1X6bE= go.yaml.in/yaml/v2 v2.4.2 h1:DzmwEr2rDGHl7lsFgAHxmNz/1NlQ7xLIrlN2h5d1eGI= go.yaml.in/yaml/v2 v2.4.2/go.mod h1:081UH+NErpNdqlCXm3TtEran0rJZGxAYx9hb/ELlsPU= +golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b h1:M2rDM6z3Fhozi9O7NWsxAkg/yqS/lQJ6PmkyIV3YP+o= +golang.org/x/exp v0.0.0-20250620022241-b7579e27df2b/go.mod h1:3//PLf8L/X+8b4vuAfHzxeRUl04Adcb341+IGKfnqS8= +golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.35.0 h1:vz1N37gP5bs89s7He8XuIYXpyY0+QlsKmzipCbUtyxI= golang.org/x/sys v0.35.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= google.golang.org/protobuf v1.36.8 h1:xHScyCOEuuwZEc6UtSOvPbAT4zRh0xcNRYekJwfqyMc= @@ -44,3 +57,11 @@ gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntN gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q= gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= +modernc.org/libc v1.66.3 h1:cfCbjTUcdsKyyZZfEUKfoHcP3S0Wkvz3jgSzByEWVCQ= +modernc.org/libc v1.66.3/go.mod h1:XD9zO8kt59cANKvHPXpx7yS2ELPheAey0vjIuZOhOU8= +modernc.org/mathutil v1.7.1 h1:GCZVGXdaN8gTqB1Mf/usp1Y/hSqgI2vAGGP4jZMCxOU= +modernc.org/mathutil v1.7.1/go.mod h1:4p5IwJITfppl0G4sUEDtCr4DthTaT47/N3aT6MhfgJg= +modernc.org/memory v1.11.0 h1:o4QC8aMQzmcwCK3t3Ux/ZHmwFPzE6hf2Y5LbkRs+hbI= +modernc.org/memory v1.11.0/go.mod h1:/JP4VbVC+K5sU2wZi9bHoq2MAkCnrt2r98UGeSK7Mjw= +modernc.org/sqlite v1.39.0 h1:6bwu9Ooim0yVYA7IZn9demiQk/Ejp0BtTjBWFLymSeY= +modernc.org/sqlite v1.39.0/go.mod h1:cPTJYSlgg3Sfg046yBShXENNtPrWrDX8bsbAQBzgQ5E= diff --git a/internal/providers/configs/apipie.json b/internal/providers/configs/apipie.json new file mode 100644 index 00000000..ab1bf49e --- /dev/null +++ b/internal/providers/configs/apipie.json @@ -0,0 +1,13960 
@@ +{ + "name": "APIpie", + "id": "apipie", + "api_key": "$APIPIE_API_KEY", + "api_endpoint": "https://apipie.ai/v1", + "type": "openai", + "default_large_model_id": "claude-sonnet-4-5", + "default_small_model_id": "claude-haiku-4-5", + "models": [ + { + "id": "afm-4-5b", + "name": "AFM-4.5B (65K)", + "cost_per_1m_in": 0.04810714, + "cost_per_1m_out": 0.14995091, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "aion", + "name": "Aion (32K)", + "cost_per_1m_in": 4.51315789, + "cost_per_1m_out": -0.28868291, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "aion-1-0", + "name": "Aion 1.0 (OpenRouter)", + "cost_per_1m_in": 4.51315789, + "cost_per_1m_out": 7.99050587, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "aion-1-0-mini", + "name": "Aion 1.0 Mini (OpenRouter)", + "cost_per_1m_in": 1.00970395, + "cost_per_1m_out": 1.36152746, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "aion-rp-llama-3-1-8b", + "name": "Aion RP Llama 3.1 8B (OpenRouter)", + "cost_per_1m_in": 0.20082034, + "cost_per_1m_out": 0.19995844, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "andromeda-alpha", + "name": "Andromeda Alpha (OpenRouter)", + "cost_per_1m_in": 8.07434053, + "cost_per_1m_out": 9.85069544, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "anubis-70b-v1-1", + "name": "Anubis 70B v1.1 (131K)", + "cost_per_1m_in": 0.6508, + "cost_per_1m_out": 0.99993739, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "anubis-pro-105b-v1", + "name": "Anubis Pro 105B v1 (OpenRouter)", + "cost_per_1m_in": 0.5008, + "cost_per_1m_out": 0.9999568, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "text-babbage-002", + "name": "Babbage 002 (EdenAI)", + "cost_per_1m_in": 0.39999999999999997, + "cost_per_1m_out": 0.39999999999999997, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "babbage-002", + "name": "Babbage-002 (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + 
"default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "chatgpt-4o", + "name": "ChatGPT-4o", + "cost_per_1m_in": 5.01229508, + "cost_per_1m_out": 14.99967872, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "chatgpt-4o-latest", + "name": "ChatGPT-4o (OpenAI)", + "cost_per_1m_in": 5.01229508, + "cost_per_1m_out": 14.99968033, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "chatgpt-4o-latest", + "name": "ChatGPT-4o (OpenRouter)", + "cost_per_1m_in": 5.01229508, + "cost_per_1m_out": 14.99968033, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "chatx", + "name": "ChatX", + "cost_per_1m_in": 0.0001672, + "cost_per_1m_out": 0.40236673, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2048, + "default_max_tokens": 512, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "chatx-fast", + "name": "ChatX Fast", + "cost_per_1m_in": 10.42215909, + "cost_per_1m_out": 2.44127358, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cheap", + "name": "Cheap", + "cost_per_1m_in": 0.00020265, + "cost_per_1m_out": 2.41207177, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cheap-fast", + "name": "Cheap Fast", + "cost_per_1m_in": 0.00020265, + "cost_per_1m_out": 2.99906781, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-2", + "name": "Claude 2", + "cost_per_1m_in": 8.01814059, + "cost_per_1m_out": 23.99830948, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-v2", + "name": "Claude 2 (v2)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-v2", + "name": "Claude 2 (v2:1)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-haiku-20240307", + "name": "Claude 3 Haiku", + "cost_per_1m_in": 0.25090843, + "cost_per_1m_out": 1.24995942, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-haiku", + "name": "Claude 3 Haiku", + "cost_per_1m_in": 0.00025436, + "cost_per_1m_out": 1.26020973, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-haiku-20240307-v1", + "name": "Claude 3 Haiku (Bedrock)", + "cost_per_1m_in": 0.25090843, + "cost_per_1m_out": 1.2499641, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-haiku", + "name": "Claude 3 Haiku (OpenRouter Vision)", + "cost_per_1m_in": 0.25090843, + "cost_per_1m_out": 1.24996443, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-opus", + "name": "Claude 3 Opus", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99779603, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-opus-20240229", + "name": "Claude 3 Opus", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99793117, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-opus-latest", + "name": "Claude 3 Opus (Anthropic)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99756084, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-opus-20240229-v1", + "name": "Claude 3 Opus (Bedrock)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99772162, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-opus-latest", + "name": "Claude 3 Opus (EdenAI)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99752246, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-opus", + "name": "Claude 3 Opus (OpenRouter)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99783641, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-sonnet-20240229-v1", + "name": "Claude 3 Sonnet (Bedrock)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99954868, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, 
+ "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-haiku", + "name": "Claude 3.5 Haiku", + "cost_per_1m_in": 0.25090843, + "cost_per_1m_out": 5.03849087, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-5-haiku-20241022", + "name": "Claude 3.5 Haiku (Anthropic)", + "cost_per_1m_in": 0.80290698, + "cost_per_1m_out": 3.99981655, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-haiku-20241022-v1", + "name": "Claude 3.5 Haiku (Bedrock)", + "cost_per_1m_in": 0.80290698, + "cost_per_1m_out": 3.99986308, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-5-haiku-latest", + "name": "Claude 3.5 Haiku (EdenAI)", + "cost_per_1m_in": 1.00363372, + "cost_per_1m_out": 4.99978029, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-haiku-20241022", + "name": "Claude 3.5 Haiku (OpenRouter)", + "cost_per_1m_in": 0.80290698, + "cost_per_1m_out": 3.999829, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-haiku", + "name": "Claude 3.5 Haiku (OpenRouter)", + "cost_per_1m_in": 1.00363372, + "cost_per_1m_out": 4.99977612, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-5-haiku", + "name": "Claude 3.5 Haiku (Vision)", + "cost_per_1m_in": 1.00363372, + "cost_per_1m_out": 4.99977612, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-sonnet", + "name": "Claude 3.5 Sonnet", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.9996226, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-5-sonnet-20240620", + "name": "Claude 3.5 Sonnet (Anthropic)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99963095, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-sonnet-latest", + "name": "Claude 3.5 Sonnet (Anthropic)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99969654, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + 
"default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-5-sonnet-20240620-v1", + "name": "Claude 3.5 Sonnet (Bedrock)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99966811, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4-5-20250929", + "name": "Claude 3.5 Sonnet (EdenAI)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99971342, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-sonnet-20240620", + "name": "Claude 3.5 Sonnet (OpenRouter)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99962007, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-sonnet-latest", + "name": "Claude 3.5 Sonnet (Vision)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99969159, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-sonnet-20241022", + "name": "Claude 3.5 Sonnet (Vision)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99966616, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-sonnet", + "name": "Claude 3.5 Sonnet (Vision)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99960906, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-5-sonnet-20241022-v2", + "name": "Claude 3.5 Sonnet v2 (Bedrock)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.9996175, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet", + "name": "Claude 3.7 Sonnet", + "cost_per_1m_in": 0.00300872, + "cost_per_1m_out": 15.10990377, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-3-7-sonnet-latest", + "name": "Claude 3.7 Sonnet (Anthropic)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99953688, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet-20250219", + "name": "Claude 3.7 Sonnet (Anthropic)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.9994144, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet-20250219-v1", + "name": "Claude 3.7 Sonnet (Bedrock)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99943764, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet-latest", + "name": "Claude 3.7 Sonnet (DeepInfra)", + "cost_per_1m_in": 3.31199128, + "cost_per_1m_out": 16.49947777, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet-latest", + "name": "Claude 3.7 Sonnet (EdenAI PDF)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99941319, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet-20250219", + "name": "Claude 3.7 Sonnet (EdenAI)", + "cost_per_1m_in": 3.00545455, + "cost_per_1m_out": 14.99969501, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet", + "name": "Claude 3.7 Sonnet (Extended Thinking)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99950449, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "claude-3-7-sonnet", + "name": "Claude 3.7 Sonnet (OpenRouter)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99950449, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "claude-4-opus", + "name": "Claude 4 Opus (DeepInfra)", + "cost_per_1m_in": 16.5599564, + "cost_per_1m_out": 82.49844113, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-4-sonnet", + "name": "Claude 4 Sonnet (DeepInfra)", + "cost_per_1m_in": 3.31199128, + "cost_per_1m_out": 16.49968823, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-haiku-4-5", + "name": "Claude Haiku 4.5", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-haiku-4-5-20251001-v1", + "name": "Claude Haiku 4.5 (Bedrock Global)", + "cost_per_1m_in": 1.10399709, + "cost_per_1m_out": 5.49989608, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-haiku-4-5-20251001-v1", + "name": "Claude Haiku 4.5 (Bedrock US, Vision)", + "cost_per_1m_in": 1.10399709, + "cost_per_1m_out": 5.49989608, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-haiku-4-5", + "name": "Claude Haiku 4.5 (OpenRouter)", + "cost_per_1m_in": 1.00363372, + "cost_per_1m_out": 4.99990552, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "claude-instant-v1", + "name": "Claude Instant v1 (Bedrock)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-opus-4", + "name": "Claude Opus 4", + "cost_per_1m_in": 0.01505814, + "cost_per_1m_out": 75.40207274, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-opus-4-20250514", + "name": "Claude Opus 4 (Anthropic)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 32000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-opus-4-20250514-v1", + "name": "Claude Opus 4 (Bedrock)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 32000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-opus-4-20250514", + "name": "Claude Opus 4 (EdenAI)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-opus-4", + "name": "Claude Opus 4 (OpenRouter)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 32000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-opus-4-1", + "name": "Claude Opus 4.1", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-opus-4-1-20250805", + "name": "Claude Opus 4.1 (Anthropic)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + 
"default_max_tokens": 32000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-opus-4-1-20250805-v1", + "name": "Claude Opus 4.1 (Bedrock)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 32000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-opus-4-1-20250805", + "name": "Claude Opus 4.1 (EdenAI)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-opus-4-1", + "name": "Claude Opus 4.1 (OpenRouter)", + "cost_per_1m_in": 15.05450581, + "cost_per_1m_out": 74.99858285, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 32000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4", + "name": "Claude Sonnet 4", + "cost_per_1m_in": 0.00300872, + "cost_per_1m_out": 15.07792177, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-sonnet-4-20250514", + "name": "Claude Sonnet 4 (Anthropic, 1M)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99970259, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4-20250514-v1", + "name": "Claude Sonnet 4 (Bedrock Global)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99971657, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4-20250514-v1", + "name": "Claude Sonnet 4 (Bedrock US, Vision)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99971657, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4-20250514", + "name": "Claude Sonnet 4 (EdenAI, 200K)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99971657, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4", + "name": "Claude Sonnet 4 (OpenRouter)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.999716, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4-5", + "name": "Claude Sonnet 4.5", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + 
"default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "claude-sonnet-4-5-20250929-v1", + "name": "Claude Sonnet 4.5 (Global)", + "cost_per_1m_in": 3.31199128, + "cost_per_1m_out": 16.49967892, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4-5", + "name": "Claude Sonnet 4.5 (OpenRouter)", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99968647, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "claude-sonnet-4-5-20250929-v1", + "name": "Claude Sonnet 4.5 (US Regions)", + "cost_per_1m_in": 3.31199128, + "cost_per_1m_out": 16.49967892, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 64000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "codegemma-7b-it", + "name": "CodeGemma 7B Instruct (DeepInfra)", + "cost_per_1m_in": 0.04010578, + "cost_per_1m_out": 0.12999725, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codellama-7b-instruct-solidity", + "name": "CodeLlama 7B Solidity (4K)", + "cost_per_1m_in": 0.80193705, + "cost_per_1m_out": 1.19966414, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "coder-large", + "name": "Coder-Large (OpenRouter)", + "cost_per_1m_in": 0.50064309, + "cost_per_1m_out": 0.79995912, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codestral", + "name": "Codestral", + "cost_per_1m_in": 1.0024077, + "cost_per_1m_out": 0.88082758, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codestral-2405", + "name": "Codestral 2405 (EdenAI)", + "cost_per_1m_in": 1, + "cost_per_1m_out": 3, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codestral-2501", + "name": "Codestral 2501 (OpenRouter)", + "cost_per_1m_in": 0.30072231, + "cost_per_1m_out": 0.89997947, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codestral-2508", + "name": "Codestral 2508 (OpenRouter)", + "cost_per_1m_in": 0.30072231, + "cost_per_1m_out": 0.8999837, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codestral-latest", + "name": "Codestral Latest (EdenAI)", + "cost_per_1m_in": 1.0024077, + "cost_per_1m_out": 2.99993988, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codestral-mamba-latest", + "name": "Codestral Mamba (EdenAI)", + "cost_per_1m_in": 0.25, + "cost_per_1m_out": 0.25, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codex-mini", + "name": "Codex Mini", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "codex-mini-latest", + "name": "Codex Mini (OpenAI)", + "cost_per_1m_in": 1.5, + "cost_per_1m_out": 6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "codex-mini", + "name": "Codex Mini (OpenRouter)", + "cost_per_1m_in": 1.5, + "cost_per_1m_out": 6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "cogito-v2-preview-llama-109b-moe", + "name": "Cogito v2 109B MoE (Together)", + "cost_per_1m_in": 0.18047929, + "cost_per_1m_out": 0.58997689, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32767, + "default_max_tokens": 32767, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cogito-v2-preview-llama-109b-moe", + "name": "Cogito v2 109B MoE (Vision)", + "cost_per_1m_in": 0.18047929, + "cost_per_1m_out": 0.58997697, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32767, + "default_max_tokens": 32767, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "cogito-v2-preview-llama-405b", + "name": "Cogito v2 405B (OpenRouter)", + "cost_per_1m_in": 3.50285714, + "cost_per_1m_out": 3.49985151, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cogito-v2-preview-deepseek-671b", + "name": "Cogito v2 671B (Together)", + "cost_per_1m_in": 1.2510305, + "cost_per_1m_out": 1.24997728, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cogito-v2-preview-deepseek-671b", + "name": "Cogito v2 671B Reasoning (OpenRouter)", + "cost_per_1m_in": 1.2510305, + "cost_per_1m_out": 1.24996744, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + 
}, + { + "id": "cogito-v2-preview-llama-70b", + "name": "Cogito v2 Preview 70B (OpenRouter)", + "cost_per_1m_in": 0.88071837, + "cost_per_1m_out": 0.87996493, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cogito-2-deepseek-671b", + "name": "Cogito-2 DeepSeek 671B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cogito-2-llama-109b-moe", + "name": "Cogito-2 Llama 109B MoE", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32767, + "default_max_tokens": 32767, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command", + "name": "Command", + "cost_per_1m_in": 1.00160772, + "cost_per_1m_out": 10.02777658, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command", + "name": "Command (EdenAI)", + "cost_per_1m_in": 1, + "cost_per_1m_out": 2, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command", + "name": "Command (OpenRouter)", + "cost_per_1m_in": 1.00160772, + "cost_per_1m_out": 1.9998968, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-a", + "name": "Command A (256K)", + "cost_per_1m_in": 2.5, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-light", + "name": "Command Light", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-light", + "name": "Command Light (EdenAI)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-light-nightly", + "name": "Command Light Nightly (EdenAI)", + "cost_per_1m_in": 0.0004906, + "cost_per_1m_out": 3.92148526, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-nightly", + "name": "Command Nightly (EdenAI)", + "cost_per_1m_in": 1.99018806, + "cost_per_1m_out": 1.98601, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + 
"default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-v1", + "name": "Command R (Bedrock)", + "cost_per_1m_in": 0.5012087, + "cost_per_1m_out": 1.49995486, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-08-2024", + "name": "Command R (EdenAI)", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-08-2024", + "name": "Command R (OpenRouter)", + "cost_per_1m_in": 0.15048348, + "cost_per_1m_out": 0.59998741, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r7b", + "name": "Command R 7B", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.14662207, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-plus", + "name": "Command R+", + "cost_per_1m_in": 0.00301127, + "cost_per_1m_out": 10.05287741, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-plus-v1", + "name": "Command R+ (Bedrock)", + "cost_per_1m_in": 3.01208703, + "cost_per_1m_out": 14.99978222, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-plus", + "name": "Command R+ (EdenAI)", + "cost_per_1m_in": 2.5, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-plus", + "name": "Command R+ (OpenRouter)", + "cost_per_1m_in": 3.01208703, + "cost_per_1m_out": 14.99978243, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-plus-04-2024", + "name": "Command R+ (OpenRouter)", + "cost_per_1m_in": 3.01208703, + "cost_per_1m_out": 14.99978243, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-plus-08-2024", + "name": "Command R+ 08-2024 (EdenAI)", + "cost_per_1m_in": 2.5, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-plus-08-2024", + 
"name": "Command R+ 08-2024 (OpenRouter)", + "cost_per_1m_in": 2.50805802, + "cost_per_1m_out": 9.99981869, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r7b-12-2024", + "name": "Command R7B (EdenAI)", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.0375, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r7b-12-2024", + "name": "Command R7B (OpenRouter)", + "cost_per_1m_in": 0.03750201, + "cost_per_1m_out": 0.15000712, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r", + "name": "Command-R", + "cost_per_1m_in": 0.5, + "cost_per_1m_out": 1.5, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r", + "name": "Command-R (EdenAI)", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-03-2024", + "name": "Command-R (EdenAI)", + "cost_per_1m_in": 0.6, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r", + "name": "Command-R (OpenRouter)", + "cost_per_1m_in": 0.5012087, + "cost_per_1m_out": 1.4999619, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "command-r-03-2024", + "name": "Command-R (OpenRouter)", + "cost_per_1m_in": 0.5012087, + "cost_per_1m_out": 1.49995824, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "computer-use", + "name": "Computer Use", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "computer-use-preview", + "name": "Computer Use Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "computer-use-preview-2025-03-11", + "name": "Computer Use Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + 
"can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cydonia-24b-v4-1", + "name": "Cydonia 24B v4.1 (OpenRouter)", + "cost_per_1m_in": 0.30035211, + "cost_per_1m_out": 0.49993099, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "cypher-alpha", + "name": "Cypher Alpha (1M)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 10000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "davinci-002", + "name": "Davinci-002 (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepcoder-14b", + "name": "DeepCoder 14B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 96000, + "default_max_tokens": 96000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepcoder-14b-preview", + "name": "DeepCoder 14B Preview", + "cost_per_1m_in": 0.015, + "cost_per_1m_out": 0.015, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 96000, + "default_max_tokens": 96000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepcoder-14b-preview", + "name": "DeepCoder 14B Preview (Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 96000, + "default_max_tokens": 96000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deephermes-3-llama-3-8b", + "name": "DeepHermes 3 Llama 3 8B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deephermes-3-llama-3-8b-preview", + "name": "DeepHermes 3 Llama 3 8B Preview", + "cost_per_1m_in": 0.03, + "cost_per_1m_out": 0.11, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deephermes-3-llama-3-8b-preview", + "name": "DeepHermes 3 Llama 3 8B Preview (Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deephermes-3-mistral-24b-preview", + "name": "DeepHermes 3 Mistral 24B (OpenRouter)", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.59, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-3-1", + "name": "DeepSeek 3.1", + 
"cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-3-1-terminus", + "name": "DeepSeek 3.1 Terminus", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-3-2-exp", + "name": "DeepSeek 3.2 Exp", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-chat-v3-0324", + "name": "DeepSeek Chat V3", + "cost_per_1m_in": 0.24069079, + "cost_per_1m_out": 0.83998003, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-chat-v3-0324", + "name": "DeepSeek Chat V3 (Free)", + "cost_per_1m_in": 0.24069079, + "cost_per_1m_out": 0.83998003, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-chat-v3-1", + "name": "DeepSeek Chat v3.1", + "cost_per_1m_in": 0.27082305, + "cost_per_1m_out": 0.99998189, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-chat-v3-1", + "name": "DeepSeek Chat v3.1 (Free)", + "cost_per_1m_in": 0.27082305, + "cost_per_1m_out": 0.99998189, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163800, + "default_max_tokens": 163800, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-ocr", + "name": "DeepSeek OCR (DeepInfra)", + "cost_per_1m_in": 0.03008224, + "cost_per_1m_out": 0.09999811, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "deepseek-prover-v2", + "name": "DeepSeek Prover V2 (OpenRouter)", + "cost_per_1m_in": 0.50179868, + "cost_per_1m_out": 2.17994516, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-prover-v2-671b", + "name": "DeepSeek Prover V2 671B (DeepInfra)", + "cost_per_1m_in": 0.25072607, + "cost_per_1m_out": 0.87998178, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-0528-turbo", + "name": "DeepSeek R1 0528 Turbo (DeepInfra)", + "cost_per_1m_in": 1.00247525, + "cost_per_1m_out": 2.9999505, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-llama-70b", + "name": "DeepSeek R1 Distill Llama 70B", + "cost_per_1m_in": 0.10032814, + "cost_per_1m_out": 0.04879212, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-llama-70b", + "name": "DeepSeek R1 Distill Llama 70B (DeepInfra)", + "cost_per_1m_in": 0.50082034, + "cost_per_1m_out": 0.99997828, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-llama-70b", + "name": "DeepSeek R1 Distill Llama 70B (Free)", + "cost_per_1m_in": 0.03010647, + "cost_per_1m_out": 0.12999744, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 4096, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-llama-70b", + "name": "DeepSeek R1 Distill Llama 70B (OpenRouter)", + "cost_per_1m_in": 0.03010647, + "cost_per_1m_out": 0.12999744, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-llama-70b", + "name": "DeepSeek R1 Distill Llama 70B (Together)", + "cost_per_1m_in": 2.00164069, + "cost_per_1m_out": 1.9999639, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-qwen-14b", + "name": "DeepSeek R1 Distill Qwen 14B", + "cost_per_1m_in": 0.15012275, + "cost_per_1m_out": 0.14999693, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-qwen-14b", + "name": "DeepSeek R1 Distill Qwen 14B (OpenRouter)", + "cost_per_1m_in": 0.15012275, + "cost_per_1m_out": 0.14999658, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-qwen-14b", + "name": "DeepSeek R1 Distill Qwen 14B (Together)", + "cost_per_1m_in": 1.60131255, + "cost_per_1m_out": 1.59996611, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-distill-qwen-32b", + "name": "DeepSeek R1 Distill Qwen 32B", + "cost_per_1m_in": 0.07512715, + "cost_per_1m_out": 0.14999695, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false 
+ }, + { + "id": "deepseek-r1-distill-qwen-32b", + "name": "DeepSeek R1 Distill Qwen 32B (OpenRouter)", + "cost_per_1m_in": 0.2902379, + "cost_per_1m_out": 0.28999374, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1t2-chimera", + "name": "DeepSeek R1T2 Chimera", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1t2-chimera", + "name": "DeepSeek R1T2 Chimera (OpenRouter Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1t2-chimera", + "name": "DeepSeek R1T2 Chimera (OpenRouter)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 1.2, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-p-dp", + "name": "DeepSeek V3 (Together)", + "cost_per_1m_in": 1.2510305, + "cost_per_1m_out": 1.24997869, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-base", + "name": "DeepSeek V3 Base (OpenRouter)", + "cost_per_1m_in": 0.19990000000000002, + "cost_per_1m_out": 0.8000999999999999, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-2-exp", + "name": "DeepSeek V3.2 Exp (DeepInfra)", + "cost_per_1m_in": 0.27032895, + "cost_per_1m_out": 0.39999243, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-2-exp", + "name": "DeepSeek V3.2 Exp (DeepSeek)", + "cost_per_1m_in": 0.28034596, + "cost_per_1m_out": 0.41999273, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-2-exp", + "name": "DeepSeek V3.2 Exp (OpenRouter)", + "cost_per_1m_in": 0.27032949, + "cost_per_1m_out": 0.39999308, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-1-base", + "name": "DeepSeek v3.1 Base (OpenRouter)", + "cost_per_1m_in": 0.25, + "cost_per_1m_out": 1, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": 
"deepseek-r1", + "name": "DeepSeek-R1", + "cost_per_1m_in": 0.50177393, + "cost_per_1m_out": 2.14996449, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r3-1", + "name": "DeepSeek-R1 (671B)", + "cost_per_1m_in": 0.56138386, + "cost_per_1m_out": 1.67997094, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1", + "name": "DeepSeek-R1 (Bedrock US)", + "cost_per_1m_in": 1.35444444, + "cost_per_1m_out": 5.39990183, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1", + "name": "DeepSeek-R1 (Bedrock)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1", + "name": "DeepSeek-R1 (DeepInfra)", + "cost_per_1m_in": 0.7019802, + "cost_per_1m_out": 2.3999604, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1", + "name": "DeepSeek-R1 (EdenAI, 65K)", + "cost_per_1m_in": 0.55, + "cost_per_1m_out": 2.1900000000000004, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1", + "name": "DeepSeek-R1 (OpenRouter Free)", + "cost_per_1m_in": 0.40164609, + "cost_per_1m_out": 1.99996379, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1", + "name": "DeepSeek-R1 (OpenRouter)", + "cost_per_1m_in": 0.40164609, + "cost_per_1m_out": 1.99996379, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-0528-tput", + "name": "DeepSeek-R1 (Together)", + "cost_per_1m_in": 0.55180544, + "cost_per_1m_out": 2.18996389, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1", + "name": "DeepSeek-R1 (Together)", + "cost_per_1m_in": 3.00577082, + "cost_per_1m_out": 6.99988458, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-0528", + "name": "DeepSeek-R1 0528 (DeepInfra)", + "cost_per_1m_in": 0.49913366, + "cost_per_1m_out": 2.14801733, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-0528", + "name": "DeepSeek-R1 0528 (OpenRouter Free)", + "cost_per_1m_in": 0.40144033, + "cost_per_1m_out": 1.74997772, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-0528", + "name": "DeepSeek-R1 0528 (OpenRouter)", + "cost_per_1m_in": 0.40144033, + "cost_per_1m_out": 1.74997772, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-turbo", + "name": "DeepSeek-R1 Turbo", + "cost_per_1m_in": 1.00247525, + "cost_per_1m_out": 2.99995035, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-turbo", + "name": "DeepSeek-R1 Turbo (DeepInfra)", + "cost_per_1m_in": 1.00247525, + "cost_per_1m_out": 2.9999505, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-0528-qwen-3-8b", + "name": "DeepSeek-R1-0528 Qwen 3 8B (32K)", + "cost_per_1m_in": 0.03009009, + "cost_per_1m_out": 0.10999784, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-0528-qwen-3-8b", + "name": "DeepSeek-R1-0528 Qwen 3 8B (Free, 131K)", + "cost_per_1m_in": 0.03009009, + "cost_per_1m_out": 0.10999784, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-qwen-3-8b", + "name": "DeepSeek-R1-Qwen-3 8B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1-zero", + "name": "DeepSeek-R1-Zero (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1t-chimera", + "name": "DeepSeek-R1T-Chimera", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1t-chimera", + "name": "DeepSeek-R1T-Chimera (OpenRouter Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, 
+ "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-r1t-chimera", + "name": "DeepSeek-R1T-Chimera (OpenRouter)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 1.2, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3", + "name": "DeepSeek-V3", + "cost_per_1m_in": 0.38073372, + "cost_per_1m_out": 0.877972, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3", + "name": "DeepSeek-V3 (DeepInfra)", + "cost_per_1m_in": 0.38073432, + "cost_per_1m_out": 0.8899806, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3", + "name": "DeepSeek-V3 (EdenAI)", + "cost_per_1m_in": 0.40742786, + "cost_per_1m_out": 1.09634014, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3", + "name": "DeepSeek-V3 (OpenRouter)", + "cost_per_1m_in": 0.30070074, + "cost_per_1m_out": 0.84998001, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3", + "name": "DeepSeek-V3 (Together)", + "cost_per_1m_in": 1.2510305, + "cost_per_1m_out": 1.24997684, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-0324-turbo", + "name": "DeepSeek-V3 Turbo (DeepInfra)", + "cost_per_1m_in": 0.25072607, + "cost_per_1m_out": 0.87998501, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-0324", + "name": "DeepSeek-V3-0324 (DeepInfra)", + "cost_per_1m_in": 0.25072607, + "cost_per_1m_out": 0.87998359, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-1", + "name": "DeepSeek-V3.1 (DeepInfra)", + "cost_per_1m_in": 0.26939967, + "cost_per_1m_out": 0.99877381, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-1-terminus", + "name": "DeepSeek-V3.1 Terminus (DeepInfra)", + "cost_per_1m_in": 0.24662273, + "cost_per_1m_out": 0.99936092, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-1-terminus", + "name": "DeepSeek-V3.1 Terminus (OpenRouter)", + "cost_per_1m_in": 0.70625308, + "cost_per_1m_out": 0.88999869, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "deepseek-v3-1-terminus", + "name": "DeepSeek-V3.1 Terminus Exacto (131K)", + "cost_per_1m_in": 0.70625308, + "cost_per_1m_out": 0.88999869, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "devstral-medium", + "name": "Devstral Medium (OpenRouter)", + "cost_per_1m_in": 0.40160514, + "cost_per_1m_out": 1.99991919, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "devstral-small", + "name": "Devstral Small", + "cost_per_1m_in": 0.07022472, + "cost_per_1m_out": 0.02825455, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "devstral-small-2505", + "name": "Devstral Small (DeepInfra)", + "cost_per_1m_in": 0.07516051, + "cost_per_1m_out": 0.19999615, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "devstral-small-2505", + "name": "Devstral Small (OpenRouter Free, 32K)", + "cost_per_1m_in": 0.05017657, + "cost_per_1m_out": 0.21973975, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "devstral-small-2505", + "name": "Devstral Small (OpenRouter)", + "cost_per_1m_in": 0.05017657, + "cost_per_1m_out": 0.21973975, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "devstral-small", + "name": "Devstral Small (OpenRouter)", + "cost_per_1m_in": 0.07022472, + "cost_per_1m_out": 0.27999428, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "devstral-small-2507", + "name": "Devstral Small 2507 (DeepInfra)", + "cost_per_1m_in": 0.07516051, + "cost_per_1m_out": 0.19999647, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dobby-mini-unhinged-plus-llama-3-1-8b", + "name": "Dobby Mini Unhinged Plus Llama 3.1 8B (131K)", + "cost_per_1m_in": 0.20016, + "cost_per_1m_out": 0.19990505, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + 
"default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-2-6-mixtral-8x7b", + "name": "Dolphin 2.6 Mixtral 8x7B (DeepInfra)", + "cost_per_1m_in": 0.12031889, + "cost_per_1m_out": 0.38999139, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-3-mistral-24b", + "name": "Dolphin 3 Mistral 24B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-3-r1-mistral-24b", + "name": "Dolphin 3 R1 Mistral 24B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-3-0-mistral-24b", + "name": "Dolphin 3.0 Mistral 24B", + "cost_per_1m_in": 0.04, + "cost_per_1m_out": 0.16999999999999998, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-3-0-mistral-24b", + "name": "Dolphin 3.0 Mistral 24B (Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-3-0-r1-mistral-24b", + "name": "Dolphin 3.0 R1 Mistral 24B", + "cost_per_1m_in": 0.01, + "cost_per_1m_out": 0.049999999999999996, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-3-0-r1-mistral-24b", + "name": "Dolphin 3.0 R1 Mistral 24B (Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-mistral-24b-venice-edition", + "name": "Dolphin Mistral 24B Venice (Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-mistral-24b-venice-edition", + "name": "Dolphin Mistral 24B Venice (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "dolphin-mistral-24b-venice-edition", + "name": "Dolphin Mistral 24B Venice Edition", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 
32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ernie-4-5-21b-a3b", + "name": "ERNIE 4.5 21B-A3B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 120000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ernie-4-5-21b-a3b", + "name": "ERNIE 4.5 21B-A3B (OpenRouter)", + "cost_per_1m_in": 0.07023083, + "cost_per_1m_out": 0.27999419, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 120000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "ernie-4-5-300b-a47b", + "name": "ERNIE 4.5 300B (123K)", + "cost_per_1m_in": 0.28086751, + "cost_per_1m_out": 1.09998433, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 123000, + "default_max_tokens": 12000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ernie-4-5-vl-424b-a47b", + "name": "ERNIE 4.5 VL 424B (Vision, 123K)", + "cost_per_1m_in": 0.4209858, + "cost_per_1m_out": 1.24998236, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 123000, + "default_max_tokens": 16000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "ernie-4-5-vl-28b-a3b", + "name": "ERNIE 4.5 Vision 28B (OpenRouter)", + "cost_per_1m_in": 0.14, + "cost_per_1m_out": 0.56, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 30000, + "default_max_tokens": 8000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "ernie-4-5-21b-a3b-thinking", + "name": "ERNIE-4.5-21B-A3B Thinking (OpenRouter)", + "cost_per_1m_in": 0.07020231, + "cost_per_1m_out": 0.2799913, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "eva-qwen-2-5-32b", + "name": "EVA Qwen 2.5 32B (OpenRouter)", + "cost_per_1m_in": 2.60278005, + "cost_per_1m_out": 3.3998238, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-1-70b-euryale-v2-2", + "name": "Euryale 3.1 70B v2.2 (DeepInfra)", + "cost_per_1m_in": 0.65061224, + "cost_per_1m_out": 0.74997691, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-1-euryale-70b", + "name": "Euryale L3.1 70B v2.2", + "cost_per_1m_in": 0.65061175, + "cost_per_1m_out": 0.74998203, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-3-euryale-70b", + "name": "Euryale L3.3 70B", + "cost_per_1m_in": 0.65061224, + "cost_per_1m_out": 0.74998224, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-32b", + "name": "GLM-4 32B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-32b", + "name": "GLM-4 32B (OpenRouter)", + "cost_per_1m_in": 0.10008197, + "cost_per_1m_out": 0.09999803, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-9b", + "name": "GLM-4 9B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-5", + "name": "GLM-4.5", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-5", + "name": "GLM-4.5 (DeepInfra)", + "cost_per_1m_in": 0.55163934, + "cost_per_1m_out": 1.99996066, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-5", + "name": "GLM-4.5 (OpenRouter)", + "cost_per_1m_in": 0.35127049, + "cost_per_1m_out": 1.54996443, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "glm-4-5-air", + "name": "GLM-4.5-Air", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-5-air", + "name": "GLM-4.5-Air (DeepInfra)", + "cost_per_1m_in": 0.03010878, + "cost_per_1m_out": 0.13998988, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-5-air", + "name": "GLM-4.5-Air (OpenRouter Free)", + "cost_per_1m_in": 0.13069444, + "cost_per_1m_out": 0.84998056, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "glm-4-5-air", + "name": "GLM-4.5-Air (OpenRouter)", + "cost_per_1m_in": 0.13069444, + "cost_per_1m_out": 0.84998056, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 98304, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "glm-4-5v", + "name": "GLM-4.5V", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + 
"default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-5v", + "name": "GLM-4.5V (DeepInfra)", + "cost_per_1m_in": 0.5, + "cost_per_1m_out": 1.7, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "glm-4-5v", + "name": "GLM-4.5V (OpenRouter)", + "cost_per_1m_in": 0.601473, + "cost_per_1m_out": 1.7999617, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 16384, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "glm-4-6", + "name": "GLM-4.6", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 202752, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-6", + "name": "GLM-4.6 (DeepInfra)", + "cost_per_1m_in": 0.6, + "cost_per_1m_out": 1.9, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 202752, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-6", + "name": "GLM-4.6 (OpenRouter Exacto)", + "cost_per_1m_in": 0.45172131, + "cost_per_1m_out": 2.09995869, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 202752, + "default_max_tokens": 202752, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-4-6", + "name": "GLM-4.6 (OpenRouter)", + "cost_per_1m_in": 0.45172131, + "cost_per_1m_out": 2.09995869, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 202752, + "default_max_tokens": 202752, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-z1-9b", + "name": "GLM-Z1 9B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "glm-z1-32b", + "name": "GLM-Z1-32B Reasoning (OpenRouter)", + "cost_per_1m_in": 0.24019402, + "cost_per_1m_out": 0.23999224, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo", + "name": "GPT-3.5 Turbo", + "cost_per_1m_in": 1.50163532, + "cost_per_1m_out": 1.41081329, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4095, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo", + "name": "GPT-3.5 Turbo (EdenAI, Image Output)", + "cost_per_1m_in": 1.50163532, + "cost_per_1m_out": 1.99981103, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16385, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-0125", + "name": "GPT-3.5 Turbo (OpenAI)", + "cost_per_1m_in": 0.50122649, + 
"cost_per_1m_out": 1.4998994, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16385, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-1106", + "name": "GPT-3.5 Turbo (OpenAI)", + "cost_per_1m_in": 1.00163532, + "cost_per_1m_out": 1.99981514, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16385, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo", + "name": "GPT-3.5 Turbo (OpenAI)", + "cost_per_1m_in": 0.50122649, + "cost_per_1m_out": 1.49988529, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16385, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-0613", + "name": "GPT-3.5 Turbo (OpenRouter)", + "cost_per_1m_in": 1.00163532, + "cost_per_1m_out": 1.9998578, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4095, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo", + "name": "GPT-3.5 Turbo (OpenRouter)", + "cost_per_1m_in": 0.50122649, + "cost_per_1m_out": 1.49981984, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16385, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo", + "name": "GPT-3.5 Turbo (Vision, 4K)", + "cost_per_1m_in": 1.50163532, + "cost_per_1m_out": 1.99981103, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4097, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-3-5-turbo-16k", + "name": "GPT-3.5 Turbo 16K (OpenRouter)", + "cost_per_1m_in": 3.00327065, + "cost_per_1m_out": 3.99963967, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16385, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-16k", + "name": "GPT-3.5 Turbo 16K Vision (EdenAI)", + "cost_per_1m_in": 3, + "cost_per_1m_out": 4, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16385, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-3-5-turbo-instruct", + "name": "GPT-3.5 Turbo Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4095, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-instruct", + "name": "GPT-3.5 Turbo Instruct (EdenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-instruct-0914", + "name": "GPT-3.5 Turbo Instruct (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, 
+ "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-instruct", + "name": "GPT-3.5 Turbo Instruct (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-3-5-turbo-instruct", + "name": "GPT-3.5 Turbo Instruct (OpenRouter)", + "cost_per_1m_in": 1.50163532, + "cost_per_1m_out": 1.99984982, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4095, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4", + "name": "GPT-4", + "cost_per_1m_in": 30.04905969, + "cost_per_1m_out": 29.31770216, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4", + "name": "GPT-4 (EdenAI/Azure)", + "cost_per_1m_in": 30.04905969, + "cost_per_1m_out": 59.99718421, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-0613", + "name": "GPT-4 (OpenAI)", + "cost_per_1m_in": 30.04905969, + "cost_per_1m_out": 59.99773838, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4", + "name": "GPT-4 (OpenAI)", + "cost_per_1m_in": 30.04905969, + "cost_per_1m_out": 59.99781209, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4", + "name": "GPT-4 (OpenRouter)", + "cost_per_1m_in": 30.04905969, + "cost_per_1m_out": 59.99761579, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-0314", + "name": "GPT-4 0314 (OpenRouter)", + "cost_per_1m_in": 30.04905969, + "cost_per_1m_out": 59.99794927, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4", + "name": "GPT-4 Code (EdenAI)", + "cost_per_1m_in": 30.04905969, + "cost_per_1m_out": 59.99718421, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-turbo", + "name": "GPT-4 Turbo", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.99915076, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-turbo", + "name": "GPT-4 Turbo (EdenAI)", + "cost_per_1m_in": 
10.02452984, + "cost_per_1m_out": 29.99923436, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1106-preview", + "name": "GPT-4 Turbo (OpenAI)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.999226, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-turbo", + "name": "GPT-4 Turbo (OpenAI)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.99923893, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-turbo-2024-04-09", + "name": "GPT-4 Turbo (OpenAI, 128K)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.99924255, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-0125-preview", + "name": "GPT-4 Turbo (OpenAI, 128K)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.99911665, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-turbo", + "name": "GPT-4 Turbo (OpenRouter)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.99929992, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1106-preview", + "name": "GPT-4 Turbo (OpenRouter)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.99926524, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-turbo-preview", + "name": "GPT-4 Turbo Preview (OpenAI)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.99911543, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-turbo-preview", + "name": "GPT-4 Turbo Preview (OpenRouter)", + "cost_per_1m_in": 10.02452984, + "cost_per_1m_out": 29.9992976, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-1", + "name": "GPT-4.1", + "cost_per_1m_in": 2.00655738, + "cost_per_1m_out": 7.99982951, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-1-2025-04-14", + "name": "GPT-4.1 (EdenAI, Image Output)", + "cost_per_1m_in": 2.00655738, + "cost_per_1m_out": 7.99982761, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-2025-04-14", + "name": "GPT-4.1 (OpenAI)", + "cost_per_1m_in": 2.00655738, + "cost_per_1m_out": 7.99980844, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1", + "name": "GPT-4.1 (OpenAI)", + "cost_per_1m_in": 2.00655738, + "cost_per_1m_out": 7.99980426, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1", + "name": "GPT-4.1 (OpenRouter)", + "cost_per_1m_in": 2.00655738, + "cost_per_1m_out": 7.99982129, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-mini", + "name": "GPT-4.1 Mini", + "cost_per_1m_in": 0.40131148, + "cost_per_1m_out": 1.5999659, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-1-mini-2025-04-14", + "name": "GPT-4.1 Mini (EdenAI Vision+)", + "cost_per_1m_in": 0.40131148, + "cost_per_1m_out": 1.5999659, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-mini", + "name": "GPT-4.1 Mini (OpenAI)", + "cost_per_1m_in": 0.40131148, + "cost_per_1m_out": 1.5999659, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-mini-2025-04-14", + "name": "GPT-4.1 Mini (OpenAI)", + "cost_per_1m_in": 0.40131148, + "cost_per_1m_out": 1.5999659, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-mini", + "name": "GPT-4.1 Mini (OpenRouter)", + "cost_per_1m_in": 0.40131148, + "cost_per_1m_out": 1.5999659, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-nano", + "name": "GPT-4.1 Nano", + "cost_per_1m_in": 0.10032787, + "cost_per_1m_out": 0.39999046, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4-1-nano-2025-04-14", + "name": "GPT-4.1 Nano (EdenAI)", + "cost_per_1m_in": 0.10032787, + "cost_per_1m_out": 0.39999133, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + 
"can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-nano-2025-04-14", + "name": "GPT-4.1 Nano (OpenAI)", + "cost_per_1m_in": 0.10032787, + "cost_per_1m_out": 0.3999914, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-nano", + "name": "GPT-4.1 Nano (OpenAI)", + "cost_per_1m_in": 0.10032787, + "cost_per_1m_out": 0.39999148, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-1-nano", + "name": "GPT-4.1 Nano (OpenRouter)", + "cost_per_1m_in": 0.10032787, + "cost_per_1m_out": 0.39999148, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1047576, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4-5-preview-2025-02-27", + "name": "GPT-4.5 Preview (EdenAI)", + "cost_per_1m_in": 75.12295082, + "cost_per_1m_out": 149.99639196, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o", + "name": "GPT-4o", + "cost_per_1m_in": 6.0147541, + "cost_per_1m_out": 9.86501683, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-2024-05-13", + "name": "GPT-4o (EdenAI Code)", + "cost_per_1m_in": 5.01229508, + "cost_per_1m_out": 14.99963508, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o", + "name": "GPT-4o (EdenAI/Azure)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99975727, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o", + "name": "GPT-4o (EdenAI/OpenAI)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99975727, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-2024-05-13", + "name": "GPT-4o (OpenAI)", + "cost_per_1m_in": 5.01229508, + "cost_per_1m_out": 14.99946454, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-2024-11-20", + "name": "GPT-4o (OpenAI)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99978689, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o", + "name": "GPT-4o 
(OpenAI)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99955877, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-2024-11-20", + "name": "GPT-4o (OpenRouter)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.9997681, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o", + "name": "GPT-4o (OpenRouter)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99963445, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-2024-05-13", + "name": "GPT-4o (OpenRouter)", + "cost_per_1m_in": 5.01229508, + "cost_per_1m_out": 14.99963256, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-audio-preview", + "name": "GPT-4o Audio Preview (OpenRouter)", + "cost_per_1m_in": 2.5, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-2024-08-06", + "name": "GPT-4o Aug 2024 (EdenAI)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99972955, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-2024-08-06", + "name": "GPT-4o Aug 2024 (OpenAI)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99957377, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-2024-08-06", + "name": "GPT-4o Aug 2024 (OpenRouter)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99966753, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o", + "name": "GPT-4o Extended (OpenRouter)", + "cost_per_1m_in": 2.50819672, + "cost_per_1m_out": 9.99963445, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 64000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-mini", + "name": "GPT-4o Mini", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.59998117, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-mini-2024-07-18", + "name": "GPT-4o Mini (EdenAI)", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.59998181, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini", + "name": "GPT-4o Mini (EdenAI/Azure)", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.5999849, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini", + "name": "GPT-4o Mini (EdenAI/OpenAI)", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.5999849, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-mini-2024-07-18", + "name": "GPT-4o Mini (OpenAI)", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.59998458, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-mini-2024-07-18", + "name": "GPT-4o Mini (OpenRouter)", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.5999835, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-mini", + "name": "GPT-4o Mini (OpenRouter)", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.59998444, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-mini", + "name": "GPT-4o Mini (Pool)", + "cost_per_1m_in": 0.1504918, + "cost_per_1m_out": 0.59998459, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini-realtime", + "name": "GPT-4o Mini Realtime", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini-realtime-preview", + "name": "GPT-4o Mini Realtime Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini-realtime-preview-2024-12-17", + "name": "GPT-4o Mini Realtime Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini-search", + "name": "GPT-4o Mini Search", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini-search-preview", + "name": "GPT-4o Mini Search (OpenAI)", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-mini-search-preview", + "name": "GPT-4o Mini Search (OpenRouter)", + "cost_per_1m_in": 0.29839242, + "cost_per_1m_out": 0.59686379, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-mini-search-preview-2025-03-11", + "name": "GPT-4o Mini Search Preview (OpenAI)", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-realtime", + "name": "GPT-4o Realtime", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-realtime-preview-2024-10-01", + "name": "GPT-4o Realtime Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-realtime-preview-2024-12-17", + "name": "GPT-4o Realtime Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-realtime-preview", + "name": "GPT-4o Realtime Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-realtime-preview-2025-06-03", + "name": "GPT-4o Realtime Preview (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-search", + "name": "GPT-4o Search", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-4o-search-preview-2025-03-11", + "name": "GPT-4o Search Preview (OpenAI)", + "cost_per_1m_in": 2.5, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": 
"gpt-4o-search-preview", + "name": "GPT-4o Search Preview (OpenAI)", + "cost_per_1m_in": 2.5, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-4o-search-preview", + "name": "GPT-4o Search Preview (OpenRouter)", + "cost_per_1m_in": 5.02267106, + "cost_per_1m_out": 9.86344516, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5", + "name": "GPT-5", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5", + "name": "GPT-5 (EdenAI, Image Out)", + "cost_per_1m_in": 9.97846596, + "cost_per_1m_out": 9.78178835, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-2025-08-07", + "name": "GPT-5 (OpenAI)", + "cost_per_1m_in": 7.29593929, + "cost_per_1m_out": 9.84885152, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5", + "name": "GPT-5 (OpenAI, 272K)", + "cost_per_1m_in": 6.40176374, + "cost_per_1m_out": 9.87120591, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5", + "name": "GPT-5 (OpenRouter, 400K)", + "cost_per_1m_in": 5.761895, + "cost_per_1m_out": 9.88250273, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-chat-latest", + "name": "GPT-5 Chat (EdenAI)", + "cost_per_1m_in": 1.35491803, + "cost_per_1m_out": 9.99727213, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-chat-latest", + "name": "GPT-5 Chat (OpenAI)", + "cost_per_1m_in": 2.69262295, + "cost_per_1m_out": 9.96218932, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-chat", + "name": "GPT-5 Chat (OpenRouter)", + "cost_per_1m_in": 2.34016393, + "cost_per_1m_out": 9.97165574, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-codex", + "name": "GPT-5 Codex", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + 
"default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-codex", + "name": "GPT-5 Codex (OpenAI Vision)", + "cost_per_1m_in": 1.25, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-image", + "name": "GPT-5 Image (OpenRouter)", + "cost_per_1m_in": 14.52274183, + "cost_per_1m_out": 8.27099349, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-image-mini", + "name": "GPT-5 Image Mini (OpenRouter)", + "cost_per_1m_in": 3.24439462, + "cost_per_1m_out": 1.71542414, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-mini", + "name": "GPT-5 Mini", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-mini", + "name": "GPT-5 Mini (EdenAI)", + "cost_per_1m_in": 1.24917966, + "cost_per_1m_out": 1.97502051, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-mini", + "name": "GPT-5 Mini (OpenAI)", + "cost_per_1m_in": 1.44278097, + "cost_per_1m_out": 1.97018048, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-mini-2025-08-07", + "name": "GPT-5 Mini (OpenAI)", + "cost_per_1m_in": 0.87346185, + "cost_per_1m_out": 1.98441345, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-mini", + "name": "GPT-5 Mini (OpenRouter, 400K)", + "cost_per_1m_in": 1.05065628, + "cost_per_1m_out": 1.97914958, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-nano-2025-08-07", + "name": "GPT-5 Nano (OpenAI)", + "cost_per_1m_in": 0.51300246, + "cost_per_1m_out": 0.38842494, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-pro", + "name": "GPT-5 Pro", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-pro", + "name": "GPT-5 Pro (EdenAI)", + 
"cost_per_1m_in": 15, + "cost_per_1m_out": 120, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 272000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-pro", + "name": "GPT-5 Pro (OpenAI)", + "cost_per_1m_in": 15, + "cost_per_1m_out": 120, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 272000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-pro-2025-10-06", + "name": "GPT-5 Pro (OpenAI)", + "cost_per_1m_in": 15, + "cost_per_1m_out": 120, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 272000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-pro", + "name": "GPT-5 Pro (OpenRouter)", + "cost_per_1m_in": 15, + "cost_per_1m_out": 120, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-search-api", + "name": "GPT-5 Search API", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-search-api", + "name": "GPT-5 Search API (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-search-api-2025-10-14", + "name": "GPT-5 Search API (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-nano", + "name": "GPT-5-Nano", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-5-nano", + "name": "GPT-5-Nano (EdenAI, Vision+Image Out)", + "cost_per_1m_in": 0.75746514, + "cost_per_1m_out": 0.38231337, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-nano", + "name": "GPT-5-Nano (OpenAI)", + "cost_per_1m_in": 0.37584085, + "cost_per_1m_out": 0.39185398, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 272000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gpt-5-nano", + "name": "GPT-5-Nano (OpenRouter, 400K)", + "cost_per_1m_in": 0.59733388, + "cost_per_1m_out": 0.38574651, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 400000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": 
true + }, + { + "id": "gpt-oss-120b-1", + "name": "GPT-OSS 120B (Bedrock)", + "cost_per_1m_in": 0.15046802, + "cost_per_1m_out": 0.59995881, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-oss-120b-turbo", + "name": "GPT-OSS 120B Turbo (DeepInfra)", + "cost_per_1m_in": 0.1504662, + "cost_per_1m_out": 0.59995664, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-oss-20b-1", + "name": "GPT-OSS 20B (Bedrock)", + "cost_per_1m_in": 0.07023401, + "cost_per_1m_out": 0.29997941, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-oss-120b", + "name": "GPT-OSS-120B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-oss-120b", + "name": "GPT-OSS-120B (DeepInfra)", + "cost_per_1m_in": 0.05018648, + "cost_per_1m_out": 0.23998266, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-oss-120b", + "name": "GPT-OSS-120B (OpenRouter)", + "cost_per_1m_in": 0.0403125, + "cost_per_1m_out": 0.39997312, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "gpt-oss-120b", + "name": "GPT-OSS-120B Exacto (OpenRouter)", + "cost_per_1m_in": 0.0403125, + "cost_per_1m_out": 0.39997312, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "gpt-oss-20b", + "name": "GPT-OSS-20B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-oss-20b", + "name": "GPT-OSS-20B (DeepInfra)", + "cost_per_1m_in": 0.03010878, + "cost_per_1m_out": 0.13998988, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gpt-oss-20b", + "name": "GPT-OSS-20B (OpenRouter Free)", + "cost_per_1m_in": 0.03010937, + "cost_per_1m_out": 0.13999325, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "gpt-oss-20b", + "name": "GPT-OSS-20B (OpenRouter)", + "cost_per_1m_in": 0.03010937, + "cost_per_1m_out": 0.13999325, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "gpt-oss-20b", + "name": "GPT-OSS-20B (Together)", + "cost_per_1m_in": 0.05015625, + "cost_per_1m_out": 0.19998656, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-1-5-flash", + "name": "Gemini 1.5 Flash", + "cost_per_1m_in": 0.03762336, + "cost_per_1m_out": 0.30350017, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-1-5-flash", + "name": "Gemini 1.5 Flash (DeepInfra)", + "cost_per_1m_in": 0.07524671, + "cost_per_1m_out": 0.30000091, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-1-5-flash", + "name": "Gemini 1.5 Flash (EdenAI)", + "cost_per_1m_in": 0.07525103, + "cost_per_1m_out": 0.30000083, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-1-5-flash-latest", + "name": "Gemini 1.5 Flash (EdenAI)", + "cost_per_1m_in": 0.07525103, + "cost_per_1m_out": 0.30000086, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-1-5-flash-8b", + "name": "Gemini 1.5 Flash 8B", + "cost_per_1m_in": 0.00394737, + "cost_per_1m_out": 0.15067638, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-1-5-flash-8b", + "name": "Gemini 1.5 Flash 8B (DeepInfra)", + "cost_per_1m_in": 0.03762336, + "cost_per_1m_out": 0.15000043, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-1-5-flash-8b", + "name": "Gemini 1.5 Flash 8B (EdenAI Multimodal)", + "cost_per_1m_in": 0.00395062, + "cost_per_1m_out": 0.00705341, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-1-5-flash-8b-latest", + "name": "Gemini 1.5 Flash 8B (EdenAI)", + "cost_per_1m_in": 0.59656582, + "cost_per_1m_out": 0.28818307, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-1-5-pro", + "name": "Gemini 1.5 Pro", + "cost_per_1m_in": 6.9979852, + "cost_per_1m_out": 4.8809796, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-1-5-pro", + "name": "Gemini 1.5 Pro (EdenAI Vision)", + "cost_per_1m_in": 3.50864198, + "cost_per_1m_out": 10.49983567, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2097152, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-1-5-pro-latest", + "name": "Gemini 1.5 Pro (EdenAI)", + "cost_per_1m_in": 3.5008642, + "cost_per_1m_out": 1.04998531, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-flash", + "name": "Gemini 2.0 Flash", + "cost_per_1m_in": 0.10032922, + "cost_per_1m_out": 0.3999944, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-flash-exp", + "name": "Gemini 2.0 Flash", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-0-flash-001", + "name": "Gemini 2.0 Flash (DeepInfra)", + "cost_per_1m_in": 0.10032922, + "cost_per_1m_out": 0.39999439, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-0-flash-exp", + "name": "Gemini 2.0 Flash (EdenAI)", + "cost_per_1m_in": 10, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-0-flash", + "name": "Gemini 2.0 Flash (EdenAI)", + "cost_per_1m_in": 0.10032922, + "cost_per_1m_out": 0.39999441, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-0-flash-001", + "name": "Gemini 2.0 Flash (OpenRouter)", + "cost_per_1m_in": 0.10032922, + "cost_per_1m_out": 0.39999439, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-0-flash-exp", + "name": "Gemini 2.0 Flash (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-0-flash-exp", + "name": "Gemini 2.0 Flash Free (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": 
false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-flash-lite", + "name": "Gemini 2.0 Flash Lite", + "cost_per_1m_in": 0.07524671, + "cost_per_1m_out": 0.30000081, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048570, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-0-flash-lite", + "name": "Gemini 2.0 Flash Lite (EdenAI)", + "cost_per_1m_in": 0.07525103, + "cost_per_1m_out": 0.30000074, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048570, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-0-flash-lite-preview-02-05", + "name": "Gemini 2.0 Flash Lite (EdenAI)", + "cost_per_1m_in": 0.07525103, + "cost_per_1m_out": 0.30000073, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-0-flash-lite-001", + "name": "Gemini 2.0 Flash Lite (OpenRouter)", + "cost_per_1m_in": 0.07525103, + "cost_per_1m_out": 0.30000074, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash", + "name": "Gemini 2.5 Flash", + "cost_per_1m_in": 0.105, + "cost_per_1m_out": 3.50076807, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-flash", + "name": "Gemini 2.5 Flash (Audio)", + "cost_per_1m_in": 0.30205761, + "cost_per_1m_out": 2.49996502, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65535, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash", + "name": "Gemini 2.5 Flash (DeepInfra)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 2.5, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash-preview-04-17", + "name": "Gemini 2.5 Flash (EdenAI)", + "cost_per_1m_in": 0.15, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-flash", + "name": "Gemini 2.5 Flash (EdenAI)", + "cost_per_1m_in": 3.83723404, + "cost_per_1m_out": 2.48699812, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash-preview-05-20", + "name": "Gemini 2.5 Flash (EdenAI)", + "cost_per_1m_in": 0.44558101, + "cost_per_1m_out": 0.59899253, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-flash-image", + "name": "Gemini 2.5 Flash Image", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-flash-image-preview", + "name": "Gemini 2.5 Flash Image (EdenAI, Text-Only)", + "cost_per_1m_in": 0.32454992, + "cost_per_1m_out": 29.9995581, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-flash-image-preview", + "name": "Gemini 2.5 Flash Image (OpenRouter)", + "cost_per_1m_in": 2.98930041, + "cost_per_1m_out": 2.45159259, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash-image", + "name": "Gemini 2.5 Flash Image (OpenRouter)", + "cost_per_1m_in": 0.30204583, + "cost_per_1m_out": 2.49997856, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash-preview-09-2025", + "name": "Gemini 2.5 Flash Preview (OpenRouter)", + "cost_per_1m_in": 0.30205761, + "cost_per_1m_out": 2.49996526, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash-lite", + "name": "Gemini 2.5 Flash-Lite", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65535, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-flash-lite", + "name": "Gemini 2.5 Flash-Lite (EdenAI)", + "cost_per_1m_in": 0.10032733, + "cost_per_1m_out": 0.39999411, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-flash-lite", + "name": "Gemini 2.5 Flash-Lite (OpenRouter)", + "cost_per_1m_in": 0.10032922, + "cost_per_1m_out": 0.39999441, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65535, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash-lite-preview-06-17", + "name": "Gemini 2.5 Flash-Lite (OpenRouter)", + "cost_per_1m_in": 0.10032733, + "cost_per_1m_out": 0.39999412, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65535, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "gemini-2-5-flash-lite-preview-09-2025", + "name": "Gemini 2.5 Flash-Lite (OpenRouter)", + "cost_per_1m_in": 0.10032733, + "cost_per_1m_out": 0.39999445, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + 
"context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro", + "name": "Gemini 2.5 Pro", + "cost_per_1m_in": 1.25, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-pro", + "name": "Gemini 2.5 Pro (DeepInfra)", + "cost_per_1m_in": 0.87500412, + "cost_per_1m_out": 174.65012325, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro-preview-03-25", + "name": "Gemini 2.5 Pro (EdenAI)", + "cost_per_1m_in": 13.09574468, + "cost_per_1m_out": 9.95689314, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro", + "name": "Gemini 2.5 Pro (EdenAI)", + "cost_per_1m_in": 18.76432079, + "cost_per_1m_out": 14.9385221, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro", + "name": "Gemini 2.5 Pro (OpenRouter)", + "cost_per_1m_in": 1.25, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro-exp-03-25", + "name": "Gemini 2.5 Pro Exp 03-25 (Eden AI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 64000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro-exp-03-25", + "name": "Gemini 2.5 Pro Exp 03-25 (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65535, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro-exp", + "name": "Gemini 2.5 Pro Experimental", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 64000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-pro-preview-05-06", + "name": "Gemini 2.5 Pro Preview (EdenAI)", + "cost_per_1m_in": 13.64402619, + "cost_per_1m_out": 9.95444092, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-2-5-pro-preview-05-06", + "name": "Gemini 2.5 Pro Preview (Multimodal)", + "cost_per_1m_in": 1.25, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65535, + 
"can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-2-5-pro-preview", + "name": "Gemini 2.5 Pro Preview (OpenRouter)", + "cost_per_1m_in": 1.25, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemini-1-5-flash-8b-exp", + "name": "Gemini Flash 1.5 8B (Experimental)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-pro", + "name": "Gemini Pro (OpenRouter)", + "cost_per_1m_in": 0.50123457, + "cost_per_1m_out": 1.49996954, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32760, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemini-pro-vision", + "name": "Gemini Pro Vision (OpenRouter)", + "cost_per_1m_in": 0.50123457, + "cost_per_1m_out": 1.49995664, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-2", + "name": "Gemma 2", + "cost_per_1m_in": 0.10008177, + "cost_per_1m_out": 0.09998712, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-27b-it", + "name": "Gemma 2 27B", + "cost_per_1m_in": 0.80065466, + "cost_per_1m_out": 0.799971, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-27b-it", + "name": "Gemma 2 27B (DeepInfra)", + "cost_per_1m_in": 0.09013019, + "cost_per_1m_out": 0.15999662, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-27b-it", + "name": "Gemma 2 27B (OpenRouter)", + "cost_per_1m_in": 0.65053148, + "cost_per_1m_out": 0.64998335, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-9b-it", + "name": "Gemma 2 9B", + "cost_per_1m_in": 0.00400327, + "cost_per_1m_out": 0.10119592, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-9b-it", + "name": "Gemma 2 9B (DeepInfra)", + "cost_per_1m_in": 0.04010578, + "cost_per_1m_out": 0.12999725, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-9b-it", + "name": "Gemma 2 9B (EdenAI)", + 
"cost_per_1m_in": 0.2001634, + "cost_per_1m_out": 0.19999389, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-9b-it", + "name": "Gemma 2 9B (OpenRouter Free)", + "cost_per_1m_in": 0.01002453, + "cost_per_1m_out": 0.02999912, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2-9b-it", + "name": "Gemma 2 9B (OpenRouter)", + "cost_per_1m_in": 0.01002453, + "cost_per_1m_out": 0.02999912, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2b-it", + "name": "Gemma 2B IT", + "cost_per_1m_in": 0.10008177, + "cost_per_1m_out": 0.09999104, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-2b-it", + "name": "Gemma 2B IT (Together)", + "cost_per_1m_in": 0.1000813, + "cost_per_1m_out": 0.09999109, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-3-12b-it", + "name": "Gemma 3 12B", + "cost_per_1m_in": 0.05008137, + "cost_per_1m_out": 0.02947788, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-3-12b-it", + "name": "Gemma 3 12B (DeepInfra)", + "cost_per_1m_in": 0.04010578, + "cost_per_1m_out": 0.12999725, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-12b-it", + "name": "Gemma 3 12B (OpenRouter)", + "cost_per_1m_in": 0.03008137, + "cost_per_1m_out": 0.09999788, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-12b-it", + "name": "Gemma 3 12B Free (32K)", + "cost_per_1m_in": 0.03008137, + "cost_per_1m_out": 0.09999788, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-1b-it", + "name": "Gemma 3 1B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-27b-it", + "name": "Gemma 3 27B", + "cost_per_1m_in": 0.09013832, + "cost_per_1m_out": 0.1699964, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 
16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-3-27b-it", + "name": "Gemma 3 27B (DeepInfra)", + "cost_per_1m_in": 0.09013019, + "cost_per_1m_out": 0.15999662, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-27b-it", + "name": "Gemma 3 27B (Free)", + "cost_per_1m_in": 0.09013019, + "cost_per_1m_out": 0.15999662, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-27b-it", + "name": "Gemma 3 27B (OpenRouter)", + "cost_per_1m_in": 0.09013019, + "cost_per_1m_out": 0.15999662, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-4b-it", + "name": "Gemma 3 4B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-3-4b-it", + "name": "Gemma 3 4B (DeepInfra, 131K)", + "cost_per_1m_in": 0.04006509, + "cost_per_1m_out": 0.07999831, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-4b-it", + "name": "Gemma 3 4B (Free, 32K)", + "cost_per_1m_in": 0.01708706, + "cost_per_1m_out": 0.06815574, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3-4b-it", + "name": "Gemma 3 4B (OpenRouter, 96K)", + "cost_per_1m_in": 0.01708706, + "cost_per_1m_out": 0.06815574, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 96000, + "default_max_tokens": 96000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3n-e2b-it", + "name": "Gemma 3n E2B IT", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "gemma-3n-e2b-it", + "name": "Gemma 3n E2B IT (OpenRouter Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "gemma-3n-e2b-it", + "name": "Gemma 3n E2B IT (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "giant-context", + "name": "Giant Context (1M+)", + "cost_per_1m_in": 
2.51010509, + "cost_per_1m_out": 0.35902821, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "goliath-120b", + "name": "Goliath 120B", + "cost_per_1m_in": 4.00363516, + "cost_per_1m_out": 5.49979643, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 6144, + "default_max_tokens": 512, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "granite-4-0-h-micro", + "name": "Granite 4.0 H Micro (131K)", + "cost_per_1m_in": 0.01708668, + "cost_per_1m_out": 0.10998668, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131000, + "default_max_tokens": 131000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-2", + "name": "Grok 2", + "cost_per_1m_in": 5.15481172, + "cost_per_1m_out": 9.91310099, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-2-latest", + "name": "Grok 2 (Vision)", + "cost_per_1m_in": 2.49372385, + "cost_per_1m_out": 9.98635138, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "grok-2", + "name": "Grok 2 (Vision, EdenAI)", + "cost_per_1m_in": 5.47280335, + "cost_per_1m_out": 9.89103073, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "grok-2-vision", + "name": "Grok 2 Vision", + "cost_per_1m_in": 2.0083682, + "cost_per_1m_out": 9.99980117, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "grok-2-vision-1212", + "name": "Grok 2 Vision (EdenAI)", + "cost_per_1m_in": 5.25523013, + "cost_per_1m_out": 9.91905914, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "grok-2-vision-latest", + "name": "Grok 2 Vision (xAI)", + "cost_per_1m_in": 2, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "grok-2-vision", + "name": "Grok 2 Vision (xAI)", + "cost_per_1m_in": 2, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "grok-3", + "name": "Grok 3", + "cost_per_1m_in": 3.01248959, + "cost_per_1m_out": 14.99971274, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": 
false, + "supports_attachments": false + }, + { + "id": "grok-3", + "name": "Grok 3 (OpenRouter)", + "cost_per_1m_in": 3.01226492, + "cost_per_1m_out": 14.99970197, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-3-beta", + "name": "Grok 3 Beta (OpenRouter)", + "cost_per_1m_in": 3.01226492, + "cost_per_1m_out": 14.99970001, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-3-mini", + "name": "Grok 3 Mini", + "cost_per_1m_in": 0.30041632, + "cost_per_1m_out": 0.49999042, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-3-mini-beta", + "name": "Grok 3 Mini (OpenRouter)", + "cost_per_1m_in": 0.30040883, + "cost_per_1m_out": 0.49998997, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-3-mini", + "name": "Grok 3 Mini (OpenRouter)", + "cost_per_1m_in": 0.30040883, + "cost_per_1m_out": 0.49998997, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-4", + "name": "Grok 4", + "cost_per_1m_in": 12.99584027, + "cost_per_1m_out": 14.75985969, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-4-0709", + "name": "Grok 4 (EdenAI)", + "cost_per_1m_in": 7.88557743, + "cost_per_1m_out": 14.27340641, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "grok-4", + "name": "Grok 4 (OpenRouter)", + "cost_per_1m_in": 3.01226492, + "cost_per_1m_out": 14.99970029, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "grok-4-fast", + "name": "Grok 4 Fast", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2000000, + "default_max_tokens": 30000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-4-fast", + "name": "Grok 4 Fast (OpenRouter Free)", + "cost_per_1m_in": 0.20040883, + "cost_per_1m_out": 0.49999021, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2000000, + "default_max_tokens": 30000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "grok-4-fast", + "name": "Grok 4 Fast (OpenRouter)", + "cost_per_1m_in": 0.20040883, + "cost_per_1m_out": 0.49999021, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2000000, + "default_max_tokens": 30000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "grok-code-fast-1", + "name": "Grok Code Fast 1 (OpenRouter)", + "cost_per_1m_in": 0.20122649, + "cost_per_1m_out": 1.4997197, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 10000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "grok-vision-beta", + "name": "Grok Vision Beta (OpenRouter)", + "cost_per_1m_in": 5.0125523, + "cost_per_1m_out": 14.99969738, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "hermes-2-mixtral-8x7b-dpo", + "name": "Hermes 2 Mixtral 8x7B DPO (32K)", + "cost_per_1m_in": 0.60042493, + "cost_per_1m_out": 0.59995183, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-2-pro-llama-3-8b", + "name": "Hermes 2 Pro Llama 3 8B (OpenRouter)", + "cost_per_1m_in": 0.02506536, + "cost_per_1m_out": 0.07999817, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-3-llama-3-1-405b", + "name": "Hermes 3 Llama 3.1 405B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-3-llama-3-1-405b", + "name": "Hermes 3 Llama 3.1 405B (DeepInfra)", + "cost_per_1m_in": 1.00081699, + "cost_per_1m_out": 0.99994281, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-3-llama-3-1-405b", + "name": "Hermes 3 Llama 3.1 405B (Free)", + "cost_per_1m_in": 1.00081633, + "cost_per_1m_out": 0.99994594, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-3-llama-3-1-405b", + "name": "Hermes 3 Llama 3.1 405B (OpenRouter)", + "cost_per_1m_in": 1.00081633, + "cost_per_1m_out": 0.99994594, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-3-llama-3-1-70b", + "name": "Hermes 3 Llama 3.1 70B", + "cost_per_1m_in": 0.10022895, + "cost_per_1m_out": 0.27998627, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-3-llama-3-1-70b", + "name": "Hermes 3 Llama 3.1 70B (DeepInfra)", + "cost_per_1m_in": 0.3002451, + 
"cost_per_1m_out": 0.29998555, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-3-llama-3-1-70b", + "name": "Hermes 3 Llama 3.1 70B (OpenRouter)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 0.3, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hermes-4-405b", + "name": "Hermes 4 405B (OpenRouter)", + "cost_per_1m_in": 0.30096852, + "cost_per_1m_out": 1.19990766, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "hermes-4-70b", + "name": "Hermes 4 70B (OpenRouter)", + "cost_per_1m_in": 0.1103067, + "cost_per_1m_out": 0.37980606, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "horizon", + "name": "Horizon (256K)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "horizon-beta", + "name": "Horizon Beta (256K)", + "cost_per_1m_in": 8.28630025, + "cost_per_1m_out": 9.97363155, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "hunyuan-a13b-instruct", + "name": "Hunyuan A13B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hunyuan-a13b-instruct", + "name": "Hunyuan A13B Instruct (OpenRouter Free)", + "cost_per_1m_in": 0.03869458, + "cost_per_1m_out": 0.02992532, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "hunyuan-a13b-instruct", + "name": "Hunyuan A13B Instruct (OpenRouter)", + "cost_per_1m_in": 0.03869458, + "cost_per_1m_out": 0.02992532, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "image-01", + "name": "Image-01 (Minimax)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "inflection-3-pi", + "name": "Inflection 3 Pi", + "cost_per_1m_in": 2.50817661, + "cost_per_1m_out": 9.998764, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8000, + "default_max_tokens": 1024, + 
"can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "inflection-3-productivity", + "name": "Inflection 3 Productivity", + "cost_per_1m_in": 2.50817661, + "cost_per_1m_out": 9.99958151, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8000, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "internvl-3-14b", + "name": "InternVL3 14B (OpenRouter)", + "cost_per_1m_in": 0.20055672, + "cost_per_1m_out": 0.39998824, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 12288, + "default_max_tokens": 12288, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "internvl-3-78b", + "name": "InternVL3 78B (OpenRouter)", + "cost_per_1m_in": 0.07, + "cost_per_1m_out": 0.26, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "j2-grande-instruct", + "name": "J2 Grande Instruct (Bedrock)", + "cost_per_1m_in": 30.09868421, + "cost_per_1m_out": 29.53125, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-mid-v1", + "name": "J2 Mid (Bedrock)", + "cost_per_1m_in": 12.54111842, + "cost_per_1m_out": 12.3046875, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-ultra", + "name": "J2 Ultra", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-ultra", + "name": "J2 Ultra (Bedrock)", + "cost_per_1m_in": 18.86184211, + "cost_per_1m_out": 18.50625, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-ultra", + "name": "J2 Ultra (EdenAI)", + "cost_per_1m_in": 15, + "cost_per_1m_out": 15, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-mid", + "name": "J2-Mid", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-mid", + "name": "J2-Mid (Bedrock)", + "cost_per_1m_in": 12.54111842, + "cost_per_1m_out": 12.3046875, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-mid", + "name": "J2-Mid (EdenAI)", + "cost_per_1m_in": 10, + "cost_per_1m_out": 10, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + 
"context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-1-5-large-v1", + "name": "Jamba 1.5 Large (Bedrock)", + "cost_per_1m_in": 2.00624025, + "cost_per_1m_out": 7.99981903, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-1-5-mini-v1", + "name": "Jamba 1.5 Mini (Bedrock)", + "cost_per_1m_in": 0.20031201, + "cost_per_1m_out": 0.39999061, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-1-6-large", + "name": "Jamba 1.6 Large (OpenRouter)", + "cost_per_1m_in": 2.00623539, + "cost_per_1m_out": 7.9997074, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-1-6-mini", + "name": "Jamba 1.6 Mini (OpenRouter)", + "cost_per_1m_in": 0.20031177, + "cost_per_1m_out": 0.39999096, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-instruct", + "name": "Jamba Instruct", + "cost_per_1m_in": 0.50049575, + "cost_per_1m_out": 0.69959278, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-instruct", + "name": "Jamba Instruct (OpenRouter)", + "cost_per_1m_in": 0.5005456, + "cost_per_1m_out": 0.69998418, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-large-1-7", + "name": "Jamba Large 1.7 (OpenRouter)", + "cost_per_1m_in": 2.00624025, + "cost_per_1m_out": 7.99975005, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "jamba-mini-1-7", + "name": "Jamba Mini 1.7 (OpenRouter)", + "cost_per_1m_in": 0.20031201, + "cost_per_1m_out": 0.39999095, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-dev-72b", + "name": "Kimi Dev 72B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-dev-72b", + "name": "Kimi Dev 72B (OpenRouter Free)", + "cost_per_1m_in": 0.82857257, + "cost_per_1m_out": 1.09807712, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-dev-72b", + "name": "Kimi Dev 72B (OpenRouter)", + "cost_per_1m_in": 0.82857257, + "cost_per_1m_out": 1.09807712, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2", + "name": "Kimi K2", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2", + "name": "Kimi K2 (OpenRouter, 63K)", + "cost_per_1m_in": 0.1420211, + "cost_per_1m_out": 2.48992926, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 63000, + "default_max_tokens": 63000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2-0905", + "name": "Kimi K2 0905", + "cost_per_1m_in": 0.39152979, + "cost_per_1m_out": 1.89993269, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2-0905", + "name": "Kimi K2 0905 (Exacto)", + "cost_per_1m_in": 0.39152979, + "cost_per_1m_out": 1.89993269, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2", + "name": "Kimi K2 Free (32K)", + "cost_per_1m_in": 0.1420211, + "cost_per_1m_out": 2.48992926, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2-instruct", + "name": "Kimi K2 Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2-instruct", + "name": "Kimi K2 Instruct (DeepInfra)", + "cost_per_1m_in": 0.40168675, + "cost_per_1m_out": 2.00001904, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-k2-instruct-0905", + "name": "Kimi K2 Instruct 0905 (DeepInfra)", + "cost_per_1m_in": 0.49951807, + "cost_per_1m_out": 1.99532313, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-vl-a3b-thinking", + "name": "Kimi VL A3B Thinking", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "kimi-vl-a3b-thinking", + "name": "Kimi VL A3B Thinking (OpenRouter Free)", + "cost_per_1m_in": 0, + 
"cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "kimi-vl-a3b-thinking", + "name": "Kimi VL A3B Thinking (OpenRouter)", + "cost_per_1m_in": 0.02, + "cost_per_1m_out": 0.08, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "l3-euryale-70b", + "name": "L3 Euryale 70B", + "cost_per_1m_in": 1.48120718, + "cost_per_1m_out": 1.47994721, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-lunaris-8b", + "name": "L3 Lunaris 8B", + "cost_per_1m_in": 0.04004078, + "cost_per_1m_out": 0.04999803, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-3-70b-euryale-v2-3", + "name": "L3.3 70B Euryale v2.3 (DeepInfra)", + "cost_per_1m_in": 0.6506, + "cost_per_1m_out": 0.74996152, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-3-electra-r1-70b", + "name": "L3.3 Electra R1 70B (OpenRouter)", + "cost_per_1m_in": 0.70076675, + "cost_per_1m_out": 0.94994249, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "lfm-2-2-6b", + "name": "LFM 2.2 6B (OpenRouter)", + "cost_per_1m_in": 0.05007651, + "cost_per_1m_out": 0.09999793, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "lfm-2-8b-a1b", + "name": "LFM-2 8B (OpenRouter)", + "cost_per_1m_in": 0.05007651, + "cost_per_1m_out": 0.09999793, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-2-13b-tiefighter", + "name": "LLaMA2-13B Tiefighter (DeepInfra)", + "cost_per_1m_in": 0.06004024, + "cost_per_1m_out": 0.05999515, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "large-context", + "name": "Large Context (128K+)", + "cost_per_1m_in": 0.00200312, + "cost_per_1m_out": 0.00035191, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "learnlm-1-5-pro-experimental", + "name": "LearnLM 1.5 Pro Experimental (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 32767, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ling-1t", + "name": "Ling-1T (OpenRouter)", + "cost_per_1m_in": 0.96600791, + "cost_per_1m_out": 1.97466717, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-2-13b", + "name": "Llama 2 13B", + "cost_per_1m_in": 0.06004024, + "cost_per_1m_out": 0.05878498, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-2-13b-chat-hf", + "name": "Llama 2 13B Chat (DeepInfra)", + "cost_per_1m_in": 0.03004898, + "cost_per_1m_out": 0.05999829, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-2-13b-chat-v1", + "name": "Llama 2 13B Chat (EdenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-2-70b", + "name": "Llama 2 70B", + "cost_per_1m_in": 0.30032653, + "cost_per_1m_out": 0.92698531, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-2-70b-chat-v1", + "name": "Llama 2 70B Chat (EdenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-2-70b-chat", + "name": "Llama 2 70B Chat (OpenRouter)", + "cost_per_1m_in": 0.90059445, + "cost_per_1m_out": 0.89878987, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-70b-8192", + "name": "Llama 3 70B (EdenAI)", + "cost_per_1m_in": 0.5906449, + "cost_per_1m_out": 0.7899813, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-70b-instruct", + "name": "Llama 3 70B Instruct", + "cost_per_1m_in": 0.30032653, + "cost_per_1m_out": 3.58974447, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-70b-instruct-v1", + "name": "Llama 3 70B Instruct (Bedrock PDF)", + "cost_per_1m_in": 2.65284784, + "cost_per_1m_out": 3.49989661, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + 
"supports_attachments": false + }, + { + "id": "llama-3-70b-instruct-v1", + "name": "Llama 3 70B Instruct (Bedrock US)", + "cost_per_1m_in": 2.65284784, + "cost_per_1m_out": 3.49989661, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-70b-instruct", + "name": "Llama 3 70B Instruct (DeepInfra)", + "cost_per_1m_in": 0.40032653, + "cost_per_1m_out": 0.39999005, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-70b-instruct-v1", + "name": "Llama 3 70B Instruct (EdenAI 8K)", + "cost_per_1m_in": 2.8600000000000003, + "cost_per_1m_out": 3.78, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-70b-instruct-v1", + "name": "Llama 3 70B Instruct (EdenAI)", + "cost_per_1m_in": 7.02003271, + "cost_per_1m_out": 3.53541128, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-70b-instruct", + "name": "Llama 3 70B Instruct (OpenRouter)", + "cost_per_1m_in": 0.30032653, + "cost_per_1m_out": 0.39999053, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-8192", + "name": "Llama 3 8B (EdenAI)", + "cost_per_1m_in": 0.05006531, + "cost_per_1m_out": 0.07999811, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-instruct", + "name": "Llama 3 8B Instruct", + "cost_per_1m_in": 0.03004898, + "cost_per_1m_out": 2.66134826, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-instruct-v1", + "name": "Llama 3 8B Instruct (Bedrock PDF)", + "cost_per_1m_in": 0.3004882, + "cost_per_1m_out": 0.59997455, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-instruct-v1", + "name": "Llama 3 8B Instruct (Bedrock US)", + "cost_per_1m_in": 0.3004882, + "cost_per_1m_out": 0.59997455, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-instruct", + "name": "Llama 3 8B Instruct (DeepInfra)", + "cost_per_1m_in": 0.03004898, + "cost_per_1m_out": 0.05999815, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + 
"supports_attachments": false + }, + { + "id": "llama-3-8b-instruct-v1", + "name": "Llama 3 8B Instruct (EdenAI v1)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-instruct-v1", + "name": "Llama 3 8B Instruct (EdenAI)", + "cost_per_1m_in": 1.20343418, + "cost_per_1m_out": 0.59872221, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-instruct", + "name": "Llama 3 8B Instruct (OpenRouter)", + "cost_per_1m_in": 0.03004898, + "cost_per_1m_out": 0.05999844, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-8b-instruct-lite", + "name": "Llama 3 8B Instruct Lite (Together)", + "cost_per_1m_in": 0.10008163, + "cost_per_1m_out": 0.09999698, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-8b-lunaris-v1-turbo", + "name": "Llama 3 8B Lunaris v1 Turbo (DeepInfra)", + "cost_per_1m_in": 0.04004082, + "cost_per_1m_out": 0.0499973, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-405b", + "name": "Llama 3.1 405B (OpenRouter)", + "cost_per_1m_in": 6, + "cost_per_1m_out": 3.996, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-405b-instruct", + "name": "Llama 3.1 405B Instruct", + "cost_per_1m_in": 5.33301871, + "cost_per_1m_out": 0.52340187, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-405b-instruct", + "name": "Llama 3.1 405B Instruct (DeepInfra)", + "cost_per_1m_in": 1.00081699, + "cost_per_1m_out": 0.99994692, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-405b-instruct-v1", + "name": "Llama 3.1 405B Instruct (EdenAI)", + "cost_per_1m_in": 5.33301871, + "cost_per_1m_out": 15.99951291, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-405b-instruct", + "name": "Llama 3.1 405B Instruct (OpenRouter)", + "cost_per_1m_in": 0.80065306, + "cost_per_1m_out": 0.79997877, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-405b-instruct-turbo", + "name": "Llama 3.1 405B Instruct Turbo (Together)", + "cost_per_1m_in": 3.5028, + "cost_per_1m_out": 3.4998488, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 130815, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "l3-1-70b-hanami-x1", + "name": "Llama 3.1 70B Hanami X1 (16K)", + "cost_per_1m_in": 3.0024, + "cost_per_1m_out": 2.9998288, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16000, + "default_max_tokens": 16000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct", + "name": "Llama 3.1 70B Instruct", + "cost_per_1m_in": 2.65284784, + "cost_per_1m_out": 0.93423376, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct-v1", + "name": "Llama 3.1 70B Instruct (Bedrock)", + "cost_per_1m_in": 0.99080553, + "cost_per_1m_out": 0.98997342, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct", + "name": "Llama 3.1 70B Instruct (DeepInfra)", + "cost_per_1m_in": 0.40032653, + "cost_per_1m_out": 0.39999053, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct-v1", + "name": "Llama 3.1 70B Instruct (EdenAI Vision)", + "cost_per_1m_in": 0.99080553, + "cost_per_1m_out": 0.98997342, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct", + "name": "Llama 3.1 70B Instruct (OpenRouter)", + "cost_per_1m_in": 0.40032653, + "cost_per_1m_out": 0.39998967, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct-turbo", + "name": "Llama 3.1 70B Instruct Turbo", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct-turbo", + "name": "Llama 3.1 70B Instruct Turbo (DeepInfra)", + "cost_per_1m_in": 0.40032653, + "cost_per_1m_out": 0.39998912, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-70b-instruct-turbo", + "name": "Llama 3.1 70B Instruct Turbo (Together)", + "cost_per_1m_in": 0.880704, + "cost_per_1m_out": 0.87996048, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, 
+ "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instant", + "name": "Llama 3.1 8B Instant (EdenAI)", + "cost_per_1m_in": 0.050064, + "cost_per_1m_out": 0.07999654, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instruct", + "name": "Llama 3.1 8B Instruct", + "cost_per_1m_in": 0.22017886, + "cost_per_1m_out": 0.21999392, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instruct-v1", + "name": "Llama 3.1 8B Instruct (Bedrock)", + "cost_per_1m_in": 0.22017886, + "cost_per_1m_out": 0.21999392, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instruct", + "name": "Llama 3.1 8B Instruct (DeepInfra 131K)", + "cost_per_1m_in": 0.03004082, + "cost_per_1m_out": 0.04999882, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instruct-v1", + "name": "Llama 3.1 8B Instruct (EdenAI Vision)", + "cost_per_1m_in": 0.22017886, + "cost_per_1m_out": 0.21999392, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instruct", + "name": "Llama 3.1 8B Instruct (NousResearch)", + "cost_per_1m_in": 0.02002447, + "cost_per_1m_out": 0.02999917, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instruct", + "name": "Llama 3.1 8B Instruct (OpenRouter)", + "cost_per_1m_in": 0.02002447, + "cost_per_1m_out": 0.02999917, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-8b-instruct-turbo", + "name": "Llama 3.1 8B Instruct Turbo (DeepInfra)", + "cost_per_1m_in": 0.02002449, + "cost_per_1m_out": 0.02999929, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-large", + "name": "Llama 3.1 Large", + "cost_per_1m_in": 0.70065359, + "cost_per_1m_out": 0.26258105, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-lumimaid-8b", + "name": "Llama 3.1 Lumimaid 8B", + "cost_per_1m_in": 0.0904894, + "cost_per_1m_out": 0.5999408, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-nemotron-70b-instruct", + "name": "Llama 3.1 Nemotron 70B (DeepInfra)", + "cost_per_1m_in": 0.6004898, + "cost_per_1m_out": 0.5999858, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-nemotron-70b-instruct", + "name": "Llama 3.1 Nemotron 70B (OpenRouter)", + "cost_per_1m_in": 0.6004898, + "cost_per_1m_out": 0.5999858, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-nemotron-70b-instruct", + "name": "Llama 3.1 Nemotron 70B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-nemotron-ultra-253b-v1", + "name": "Llama 3.1 Nemotron Ultra 253B (OpenRouter)", + "cost_per_1m_in": 0.60146341, + "cost_per_1m_out": 1.79995024, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-1-swallow-70b-instruct-v0-3", + "name": "Llama 3.1 Swallow 70B Instruct (OpenRouter)", + "cost_per_1m_in": 0.60097959, + "cost_per_1m_out": 1.19994749, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-11b-instruct", + "name": "Llama 3.2 11B Instruct", + "cost_per_1m_in": 0.45056, + "cost_per_1m_out": 0.69996797, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-11b-instruct-v1", + "name": "Llama 3.2 11B Instruct (Vision)", + "cost_per_1m_in": 0.35028, + "cost_per_1m_out": 0.34998444, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-3-2-11b-vision-instruct", + "name": "Llama 3.2 11B Vision", + "cost_per_1m_in": 0.04903673, + "cost_per_1m_out": 0.04900307, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-3-2-11b-vision-instruct", + "name": "Llama 3.2 11B Vision (DeepInfra)", + "cost_per_1m_in": 0.04903673, + "cost_per_1m_out": 0.04899993, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": 
"llama-3-2-11b-vision-instruct", + "name": "Llama 3.2 11B Vision (OpenRouter)", + "cost_per_1m_in": 0.04903673, + "cost_per_1m_out": 0.04899456, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-3-2-1b-instruct", + "name": "Llama 3.2 1B Instruct", + "cost_per_1m_in": 0.220176, + "cost_per_1m_out": 0.0037649, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-1b-instruct-v1", + "name": "Llama 3.2 1B Instruct (Bedrock)", + "cost_per_1m_in": 0.10008, + "cost_per_1m_out": 0.09999407, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-1b-instruct", + "name": "Llama 3.2 1B Instruct (DeepInfra)", + "cost_per_1m_in": 0.02001633, + "cost_per_1m_out": 0.01999952, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-1b-instruct", + "name": "Llama 3.2 1B Instruct (OpenRouter)", + "cost_per_1m_in": 0.00500816, + "cost_per_1m_out": 0.01000476, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-3b-instruct", + "name": "Llama 3.2 3B Instruct", + "cost_per_1m_in": 0.45056, + "cost_per_1m_out": -0.00697624, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-3b-instruct-v1", + "name": "Llama 3.2 3B Instruct (Bedrock)", + "cost_per_1m_in": 0.15012, + "cost_per_1m_out": 0.14999209, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-3b-instruct", + "name": "Llama 3.2 3B Instruct (DeepInfra, 131K)", + "cost_per_1m_in": 0.02001633, + "cost_per_1m_out": 0.01999953, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-3b-instruct", + "name": "Llama 3.2 3B Instruct (Free, 131K)", + "cost_per_1m_in": 0.02001631, + "cost_per_1m_out": 0.01999948, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-3b-instruct", + "name": "Llama 3.2 3B Instruct (OpenRouter)", + "cost_per_1m_in": 0.02001631, + "cost_per_1m_out": 0.01999948, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": 
false, + "supports_attachments": false + }, + { + "id": "llama-3-2-90b-instruct-v1", + "name": "Llama 3.2 90B Instruct (Vision)", + "cost_per_1m_in": 2.0016, + "cost_per_1m_out": 1.99991148, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-3-2-90b-vision-instruct", + "name": "Llama 3.2 90B Vision", + "cost_per_1m_in": 1.20097879, + "cost_per_1m_out": 0.37532162, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-3-2-90b-vision-instruct", + "name": "Llama 3.2 90B Vision (DeepInfra)", + "cost_per_1m_in": 0.15048741, + "cost_per_1m_out": 0.59998422, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-3-2-90b-vision-instruct", + "name": "Llama 3.2 90B Vision (OpenRouter)", + "cost_per_1m_in": 0.35032494, + "cost_per_1m_out": 0.39998911, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-3-2-large", + "name": "Llama 3.2 Large", + "cost_per_1m_in": 1.20097879, + "cost_per_1m_out": 1.19997064, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-2-small", + "name": "Llama 3.2 Small", + "cost_per_1m_in": 0.04903673, + "cost_per_1m_out": 0.04899584, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct", + "name": "Llama 3.3 70B Instruct", + "cost_per_1m_in": 0.03809951, + "cost_per_1m_out": 0.75682263, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct-v1", + "name": "Llama 3.3 70B Instruct (Bedrock)", + "cost_per_1m_in": 0.720576, + "cost_per_1m_out": 0.7199689, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct", + "name": "Llama 3.3 70B Instruct (DeepInfra)", + "cost_per_1m_in": 0.23032653, + "cost_per_1m_out": 0.39999053, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct", + "name": "Llama 3.3 70B Instruct (Free)", + "cost_per_1m_in": 0.13030995, + "cost_per_1m_out": 0.37998983, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct", + "name": "Llama 3.3 70B Instruct (OpenRouter)", + "cost_per_1m_in": 0.13030995, + "cost_per_1m_out": 0.37998983, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct-turbo", + "name": "Llama 3.3 70B Instruct Turbo", + "cost_per_1m_in": 0.880704, + "cost_per_1m_out": 0.09555958, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct-turbo", + "name": "Llama 3.3 70B Instruct Turbo (DeepInfra)", + "cost_per_1m_in": 0.1303102, + "cost_per_1m_out": 0.379991, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-instruct-turbo", + "name": "Llama 3.3 70B Instruct Turbo (Together)", + "cost_per_1m_in": 0.880704, + "cost_per_1m_out": 0.87996198, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-70b-versatile", + "name": "Llama 3.3 70B Versatile (EdenAI)", + "cost_per_1m_in": 0.590632, + "cost_per_1m_out": 0.78996233, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-8b-instruct", + "name": "Llama 3.3 8B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4028, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-8b-instruct", + "name": "Llama 3.3 8B Instruct (OpenRouter Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4028, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-8b-instruct", + "name": "Llama 3.3 8B Instruct (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4028, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-nemotron-super-49b", + "name": "Llama 3.3 Nemotron Super 49B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-nemotron-super-49b-1-5", + "name": "Llama 3.3 Nemotron Super 49B (131K)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + 
"can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-nemotron-super-49b-v1-5", + "name": "Llama 3.3 Nemotron Super 49B (DeepInfra)", + "cost_per_1m_in": 0.10032415, + "cost_per_1m_out": 0.39998768, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-3-3-nemotron-super-49b-v1-5", + "name": "Llama 3.3 Nemotron Super 49B (OpenRouter)", + "cost_per_1m_in": 0.10032415, + "cost_per_1m_out": 0.39998768, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-4", + "name": "Llama 4", + "cost_per_1m_in": 0.0802437, + "cost_per_1m_out": 0.29999318, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-4-maverick", + "name": "Llama 4 Maverick", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4028, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-4-maverick", + "name": "Llama 4 Maverick (OpenRouter Free)", + "cost_per_1m_in": 0.15048741, + "cost_per_1m_out": 0.59998541, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4028, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-maverick", + "name": "Llama 4 Maverick (OpenRouter, 1M)", + "cost_per_1m_in": 0.15048741, + "cost_per_1m_out": 0.59998541, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-maverick-17b-128e-instruct-fp8", + "name": "Llama 4 Maverick 17B (DeepInfra)", + "cost_per_1m_in": 0.15048741, + "cost_per_1m_out": 0.59998513, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1048576, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-maverick-17b-128e-instruct-turbo", + "name": "Llama 4 Maverick 17B (DeepInfra)", + "cost_per_1m_in": 0.15048741, + "cost_per_1m_out": 0.59998657, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-maverick-17b-instruct-v1", + "name": "Llama 4 Maverick 17B (Vision)", + "cost_per_1m_in": 0.24077229, + "cost_per_1m_out": 0.96995452, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-maverick-17b-instruct", + "name": "Llama 4 Maverick 17B Instruct", + "cost_per_1m_in": 0.00023885, + "cost_per_1m_out": 0.60451793, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 
0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-4-scout", + "name": "Llama 4 Scout", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4028, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-4-scout", + "name": "Llama 4 Scout (327K)", + "cost_per_1m_in": 0.0802437, + "cost_per_1m_out": 0.29999256, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 327680, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-scout", + "name": "Llama 4 Scout (Free)", + "cost_per_1m_in": 0.0802437, + "cost_per_1m_out": 0.29999256, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4028, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-scout-17b-instruct-v1", + "name": "Llama 4 Scout 17B (Bedrock)", + "cost_per_1m_in": 0.17052548, + "cost_per_1m_out": 0.65996888, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-scout-17b-16e-instruct", + "name": "Llama 4 Scout 17B (DeepInfra)", + "cost_per_1m_in": 0.0802437, + "cost_per_1m_out": 0.29999342, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 327680, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-4-scout-17b-instruct", + "name": "Llama 4 Scout 17B Instruct", + "cost_per_1m_in": 0.0001672, + "cost_per_1m_out": 0.30215549, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-guard-2-8b", + "name": "Llama Guard 2 8B (OpenRouter)", + "cost_per_1m_in": 0.20016447, + "cost_per_1m_out": 0.19997815, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-guard-3-8b", + "name": "Llama Guard 3 8B", + "cost_per_1m_in": 0.02004934, + "cost_per_1m_out": 3.88459704, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-guard-3-8b", + "name": "Llama Guard 3 8B (DeepInfra)", + "cost_per_1m_in": 0.05503887, + "cost_per_1m_out": 0.05324382, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-guard-3-8b", + "name": "Llama Guard 3 8B (OpenRouter)", + "cost_per_1m_in": 0.02004934, + "cost_per_1m_out": 0.05999906, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + 
"can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-guard-4-12b", + "name": "Llama Guard 4 12B", + "cost_per_1m_in": 0.05003509, + "cost_per_1m_out": 0.0474152, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "llama-guard-4-12b", + "name": "Llama Guard 4 12B (DeepInfra)", + "cost_per_1m_in": 0.18012632, + "cost_per_1m_out": 0.17069474, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "llama-guard-4-12b", + "name": "Llama Guard 4 12B (OpenRouter)", + "cost_per_1m_in": 0.18012632, + "cost_per_1m_out": 0.16635789, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "longcat-flash-chat", + "name": "LongCat Flash Chat (Free)", + "cost_per_1m_in": 0.15060729, + "cost_per_1m_out": 0.749983, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "longcat-flash-chat", + "name": "LongCat Flash Chat (Tools)", + "cost_per_1m_in": 0.15060729, + "cost_per_1m_out": 0.749983, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "longcat-flash", + "name": "Longcat Flash", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mai-ds-r1", + "name": "MAI-DS-R1", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mai-ds-r1", + "name": "MAI-DS-R1 (OpenRouter Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mai-ds-r1", + "name": "MAI-DS-R1 (OpenRouter)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 1.2, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 163840, + "default_max_tokens": 163840, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mn-celeste-12b", + "name": "MN-Celeste 12B (16K)", + "cost_per_1m_in": 0.80196239, + "cost_per_1m_out": 1.19991861, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "maestro-reasoning", + "name": "Maestro Reasoning (OpenRouter)", + 
"cost_per_1m_in": 0.90269388, + "cost_per_1m_out": 3.29992188, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magistral-medium", + "name": "Magistral Medium", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40000, + "default_max_tokens": 40000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magistral-medium-2506", + "name": "Magistral Medium (EdenAI)", + "cost_per_1m_in": 2.00401284, + "cost_per_1m_out": 4.99991172, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40000, + "default_max_tokens": 40000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magistral-medium-2506", + "name": "Magistral Medium (OpenRouter)", + "cost_per_1m_in": 2.00401284, + "cost_per_1m_out": 4.9998954, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magistral-medium-2506", + "name": "Magistral Medium Thinking (OpenRouter)", + "cost_per_1m_in": 2.00401284, + "cost_per_1m_out": 4.9998954, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magistral-small", + "name": "Magistral Small", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40000, + "default_max_tokens": 40000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magistral-small-2506", + "name": "Magistral Small (EdenAI)", + "cost_per_1m_in": 0.50120385, + "cost_per_1m_out": 1.49997352, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40000, + "default_max_tokens": 40000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magistral-small-2506", + "name": "Magistral Small (OpenRouter)", + "cost_per_1m_in": 0.50120385, + "cost_per_1m_out": 1.49995167, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40000, + "default_max_tokens": 40000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magnum-v2-72b", + "name": "Magnum v2 72B", + "cost_per_1m_in": 3.00490597, + "cost_per_1m_out": 2.99967213, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "magnum-v4-72b", + "name": "Magnum v4 72B (16K)", + "cost_per_1m_in": 3.00408831, + "cost_per_1m_out": 4.99977146, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "marin-8b-instruct", + "name": "Marin 8B Instruct", + "cost_per_1m_in": 0.18011125, + "cost_per_1m_out": 0.17992266, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mercury", + "name": "Mercury", + "cost_per_1m_in": 0.25084034, + "cost_per_1m_out": 0.99999868, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mercury", + "name": "Mercury (OpenRouter)", + "cost_per_1m_in": 0.25082237, + "cost_per_1m_out": 0.99997123, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mercury-coder", + "name": "Mercury Coder (128K)", + "cost_per_1m_in": 0.2508244, + "cost_per_1m_out": 0.99997854, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mercury-coder-small-beta", + "name": "Mercury Coder Small (OpenRouter)", + "cost_per_1m_in": 0.2508244, + "cost_per_1m_out": 0.99997854, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "minimax", + "name": "MiniMax", + "cost_per_1m_in": 0.30398792, + "cost_per_1m_out": 1.00653926, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 204800, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "minimax-text-01", + "name": "MiniMax Text 01 (EdenAI)", + "cost_per_1m_in": 0.59958005, + "cost_per_1m_out": 0.73864065, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "minimax-01", + "name": "MiniMax-01 (OpenRouter)", + "cost_per_1m_in": 0.20057743, + "cost_per_1m_out": 1.09948103, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000192, + "default_max_tokens": 1000192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "minimax-m1", + "name": "MiniMax-M1", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 40000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "minimax-m1", + "name": "MiniMax-M1 (EdenAI)", + "cost_per_1m_in": 2.32882175, + "cost_per_1m_out": 1.70244102, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "minimax-m1", + "name": "MiniMax-M1 (OpenRouter)", + "cost_per_1m_in": 0.40531722, + "cost_per_1m_out": 2.19745837, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 40000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": 
"minimax-m2", + "name": "MiniMax-M2 (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 204800, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ministral-3b", + "name": "Ministral 3B (OpenRouter)", + "cost_per_1m_in": 0.0400321, + "cost_per_1m_out": 0.03999888, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ministral-8b", + "name": "Ministral 8B", + "cost_per_1m_in": 0.10008026, + "cost_per_1m_out": 0.0999969, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ministral-8b", + "name": "Ministral 8B (OpenRouter)", + "cost_per_1m_in": 0.10008026, + "cost_per_1m_out": 0.09999661, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "open-mistral-7b", + "name": "Mistral 7B (EdenAI)", + "cost_per_1m_in": 0.25, + "cost_per_1m_out": 0.25, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct", + "name": "Mistral 7B Instruct", + "cost_per_1m_in": 0.15014154, + "cost_per_1m_out": 0.04762305, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-0-1", + "name": "Mistral 7B Instruct", + "cost_per_1m_in": 0.02803698, + "cost_per_1m_out": 0.19471469, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2824, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct", + "name": "Mistral 7B Instruct (Free)", + "cost_per_1m_in": 0.02804116, + "cost_per_1m_out": 0.05400316, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct", + "name": "Mistral 7B Instruct (OpenRouter)", + "cost_per_1m_in": 0.02804116, + "cost_per_1m_out": 0.05400316, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-1", + "name": "Mistral 7B Instruct v0.1 (DeepInfra 32K)", + "cost_per_1m_in": 0.07516051, + "cost_per_1m_out": 0.19999604, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-1", + "name": "Mistral 7B Instruct v0.1 (OpenRouter)", + "cost_per_1m_in": 
0.11013456, + "cost_per_1m_out": 0.18998192, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2824, + "default_max_tokens": 2824, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-1", + "name": "Mistral 7B Instruct v0.1 (Together 32K)", + "cost_per_1m_in": 0.20014164, + "cost_per_1m_out": 0.19998439, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-0-2", + "name": "Mistral 7B Instruct v0.2", + "cost_per_1m_in": 0.02803698, + "cost_per_1m_out": 0.20684282, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0", + "name": "Mistral 7B Instruct v0.2 (Bedrock)", + "cost_per_1m_in": 0.15014174, + "cost_per_1m_out": 0.1999914, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-2", + "name": "Mistral 7B Instruct v0.2 (OpenRouter)", + "cost_per_1m_in": 0.20014194, + "cost_per_1m_out": 0.19999024, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-2", + "name": "Mistral 7B Instruct v0.2 (Together)", + "cost_per_1m_in": 0.20014205, + "cost_per_1m_out": 0.19999476, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-0-3", + "name": "Mistral 7B Instruct v0.3", + "cost_per_1m_in": 0.20014194, + "cost_per_1m_out": 0.04660902, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-3", + "name": "Mistral 7B Instruct v0.3 (DeepInfra)", + "cost_per_1m_in": 0.07516051, + "cost_per_1m_out": 0.19999577, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-3", + "name": "Mistral 7B Instruct v0.3 (OpenRouter)", + "cost_per_1m_in": 0.02804296, + "cost_per_1m_out": 0.05399412, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-7b-instruct-v0-3", + "name": "Mistral 7B Instruct v0.3 (Together)", + "cost_per_1m_in": 0.20014205, + "cost_per_1m_out": 0.19999492, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": 
"mistral-large", + "name": "Mistral Large", + "cost_per_1m_in": 2.00426136, + "cost_per_1m_out": 31.57849641, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large-latest", + "name": "Mistral Large (EdenAI)", + "cost_per_1m_in": 55.22158626, + "cost_per_1m_out": 26.95884785, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large", + "name": "Mistral Large (OpenRouter)", + "cost_per_1m_in": 2.00426439, + "cost_per_1m_out": 5.99988302, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large-2402-v1", + "name": "Mistral Large 2402 (Bedrock)", + "cost_per_1m_in": 8.01705757, + "cost_per_1m_out": 23.99916607, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large-2402", + "name": "Mistral Large 2402 (EdenAI)", + "cost_per_1m_in": 4, + "cost_per_1m_out": 12, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large-2407", + "name": "Mistral Large 2407 (EdenAI)", + "cost_per_1m_in": 3, + "cost_per_1m_out": 9, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large-2407", + "name": "Mistral Large 2407 (OpenRouter)", + "cost_per_1m_in": 2.00426439, + "cost_per_1m_out": 5.99989315, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large-2411", + "name": "Mistral Large 2411 (OpenRouter)", + "cost_per_1m_in": 2.00426439, + "cost_per_1m_out": 5.99989599, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-large-latest", + "name": "Mistral Large Vision (EdenAI)", + "cost_per_1m_in": 2.00426136, + "cost_per_1m_out": 5.99990625, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-medium", + "name": "Mistral Medium", + "cost_per_1m_in": 2.7565008, + "cost_per_1m_out": 1.94519766, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-medium-2312", + "name": "Mistral Medium (EdenAI)", + "cost_per_1m_in": 2.7, + 
"cost_per_1m_out": 8.1, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-medium-latest", + "name": "Mistral Medium (EdenAI)", + "cost_per_1m_in": 0.40160514, + "cost_per_1m_out": 1.99996469, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-medium", + "name": "Mistral Medium (EdenAI)", + "cost_per_1m_in": 2.7, + "cost_per_1m_out": 8.1, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-medium", + "name": "Mistral Medium (EdenAI)", + "cost_per_1m_in": 19.6572363, + "cost_per_1m_out": 9.03561541, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-medium-3", + "name": "Mistral Medium 3", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-medium-3", + "name": "Mistral Medium 3 (Vision)", + "cost_per_1m_in": 0.40160514, + "cost_per_1m_out": 1.99996469, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-medium-3-1", + "name": "Mistral Medium 3.1 (OpenRouter)", + "cost_per_1m_in": 0.40160514, + "cost_per_1m_out": 1.99996469, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-nemo", + "name": "Mistral Nemo", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "open-mistral-nemo", + "name": "Mistral Nemo", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "open-mistral-nemo-2407", + "name": "Mistral Nemo (EdenAI)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 0.3, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "open-mistral-nemo", + "name": "Mistral Nemo (EdenAI)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 0.3, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": 
false, + "supports_attachments": false + }, + { + "id": "mistral-nemo", + "name": "Mistral Nemo (OpenRouter Free)", + "cost_per_1m_in": 0.020032, + "cost_per_1m_out": 0.03999902, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-nemo", + "name": "Mistral Nemo (OpenRouter)", + "cost_per_1m_in": 0.020032, + "cost_per_1m_out": 0.03999902, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-nemo-instruct-2407", + "name": "Mistral Nemo Instruct (DeepInfra)", + "cost_per_1m_in": 0.0200321, + "cost_per_1m_out": 0.03999885, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-saba", + "name": "Mistral Saba", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-saba-latest", + "name": "Mistral Saba (EdenAI)", + "cost_per_1m_in": 0.19999999999999998, + "cost_per_1m_out": 0.6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-saba", + "name": "Mistral Saba (OpenRouter)", + "cost_per_1m_in": 0.20048154, + "cost_per_1m_out": 0.59998619, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small", + "name": "Mistral Small", + "cost_per_1m_in": 1.00213068, + "cost_per_1m_out": 0.27588472, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-2402-v1", + "name": "Mistral Small (Bedrock)", + "cost_per_1m_in": 1.0021322, + "cost_per_1m_out": 2.99991133, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small", + "name": "Mistral Small (EdenAI)", + "cost_per_1m_in": 13.85445626, + "cost_per_1m_out": 6.60087503, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 2047, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small", + "name": "Mistral Small (EdenAI)", + "cost_per_1m_in": 0.20518092, + "cost_per_1m_out": 0.30344587, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-latest", + "name": "Mistral Small (EdenAI)", + 
"cost_per_1m_in": 0.10024077, + "cost_per_1m_out": 0.2999939, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small", + "name": "Mistral Small (OpenRouter, 32K)", + "cost_per_1m_in": 0.20042644, + "cost_per_1m_out": 0.599989, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-24b-instruct-2501", + "name": "Mistral Small 24B (DeepInfra)", + "cost_per_1m_in": 0.0500569, + "cost_per_1m_out": 0.07998484, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-24b-instruct-2501", + "name": "Mistral Small 24B (OpenRouter Free)", + "cost_per_1m_in": 0.05006421, + "cost_per_1m_out": 0.07998304, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-24b-instruct-2501", + "name": "Mistral Small 24B (OpenRouter)", + "cost_per_1m_in": 0.05006421, + "cost_per_1m_out": 0.07998304, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-24b-instruct-2501", + "name": "Mistral Small 24B (Together)", + "cost_per_1m_in": 0.80056338, + "cost_per_1m_out": 0.79985297, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-24b-instruct", + "name": "Mistral Small 24B Instruct", + "cost_per_1m_in": 0.03002134, + "cost_per_1m_out": 0.82155157, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-3-1-24b-instruct", + "name": "Mistral Small 3.1 24B", + "cost_per_1m_in": 0.05007042, + "cost_per_1m_out": 0.02109307, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 96000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-3-1-24b-instruct-2503", + "name": "Mistral Small 3.1 24B (DeepInfra)", + "cost_per_1m_in": 0.07516051, + "cost_per_1m_out": 0.19999617, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-small-3-1-24b-instruct", + "name": "Mistral Small 3.1 24B (Vision, 128K)", + "cost_per_1m_in": 0.05008026, + "cost_per_1m_out": 0.09999823, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + 
"supports_attachments": true + }, + { + "id": "mistral-small-3-1-24b-instruct", + "name": "Mistral Small 3.1 24B (Vision, Free)", + "cost_per_1m_in": 0.05008026, + "cost_per_1m_out": 0.09999823, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 96000, + "default_max_tokens": 96000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-small-3-2-24b-instruct", + "name": "Mistral Small 3.2 24B", + "cost_per_1m_in": 0.05008026, + "cost_per_1m_out": 0.09999802, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-small-3-2-24b-instruct-2506", + "name": "Mistral Small 3.2 24B (DeepInfra)", + "cost_per_1m_in": 0.07516051, + "cost_per_1m_out": 0.19999644, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-small-3-2-24b-instruct", + "name": "Mistral Small 3.2 24B (Vision)", + "cost_per_1m_in": 0.06014446, + "cost_per_1m_out": 0.17999611, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-small-3-2-24b-instruct", + "name": "Mistral Small 3.2 24B (Vision, Free)", + "cost_per_1m_in": 0.06014446, + "cost_per_1m_out": 0.17999611, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mistral-tiny", + "name": "Mistral Tiny", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-tiny", + "name": "Mistral Tiny (EdenAI Chat)", + "cost_per_1m_in": 0.25, + "cost_per_1m_out": 0.25, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-tiny", + "name": "Mistral Tiny (EdenAI)", + "cost_per_1m_in": 0.57522486, + "cost_per_1m_out": 0.26623077, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mistral-tiny", + "name": "Mistral Tiny (OpenRouter, 32K)", + "cost_per_1m_in": 0.25017768, + "cost_per_1m_out": 0.2499903, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "open-mixtral-8x22b", + "name": "Mixtral 8x22B (Eden AI)", + "cost_per_1m_in": 2, + "cost_per_1m_out": 6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + 
"id": "mixtral-8x22b-instruct", + "name": "Mixtral 8x22B Instruct", + "cost_per_1m_in": 0.48034067, + "cost_per_1m_out": 0.92395316, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mixtral-8x22b-instruct-v0-1", + "name": "Mixtral 8x22B Instruct (DeepInfra)", + "cost_per_1m_in": 0.48034067, + "cost_per_1m_out": 0.47998946, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mixtral-8x22b-instruct", + "name": "Mixtral 8x22B Instruct (OpenRouter)", + "cost_per_1m_in": 2.00426439, + "cost_per_1m_out": 5.99974848, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "open-mixtral-8x7b", + "name": "Mixtral 8x7B (EdenAI)", + "cost_per_1m_in": 0.7, + "cost_per_1m_out": 0.7, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8191, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mixtral-8x7b-instruct-0-1", + "name": "Mixtral 8x7B Instruct", + "cost_per_1m_in": 0.60042463, + "cost_per_1m_out": 0.2003066, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mixtral-8x7b-instruct", + "name": "Mixtral 8x7B Instruct", + "cost_per_1m_in": 0.4504954, + "cost_per_1m_out": 0.21736793, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mixtral-8x7b-instruct-v0", + "name": "Mixtral 8x7B Instruct (Bedrock)", + "cost_per_1m_in": 0.4504961, + "cost_per_1m_out": 0.69996892, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 8191, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mixtral-8x7b-instruct-v0-1", + "name": "Mixtral 8x7B Instruct (DeepInfra)", + "cost_per_1m_in": 0.54038298, + "cost_per_1m_out": 0.53996495, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mixtral-8x7b-instruct", + "name": "Mixtral 8x7B Instruct (OpenRouter)", + "cost_per_1m_in": 0.54038244, + "cost_per_1m_out": 0.53996763, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "molmo-7b-d", + "name": "Molmo 7B-D (OpenRouter)", + "cost_per_1m_in": 0.10016353, + "cost_per_1m_out": 0.19997738, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": 
"moonlight-16b-a3b-instruct", + "name": "Moonlight 16B Instruct (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "morph-v3-fast", + "name": "Morph v3 Fast (OpenRouter)", + "cost_per_1m_in": 1.17390637, + "cost_per_1m_out": -0.80039908, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 81920, + "default_max_tokens": 38000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "morph-v3-large", + "name": "Morph v3 Large (OpenRouter)", + "cost_per_1m_in": 1.49347659, + "cost_per_1m_out": 1.33803544, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "multimodal", + "name": "Multimodal", + "cost_per_1m_in": 0.0602005, + "cost_per_1m_out": 12.56744161, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 1500, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "mythomax", + "name": "MythoMax", + "cost_per_1m_in": 0.06504359, + "cost_per_1m_out": 0.06499435, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 400, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mythomax-l2-13b", + "name": "MythoMax L2 13B", + "cost_per_1m_in": 0.06003968, + "cost_per_1m_out": 0.05999633, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mythomax-l2-13b", + "name": "MythoMax L2 13B (DeepInfra)", + "cost_per_1m_in": 0.06004024, + "cost_per_1m_out": 0.05999566, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mythomax-l2-13b", + "name": "MythoMax L2 13B (OpenRouter)", + "cost_per_1m_in": 0.06003971, + "cost_per_1m_out": 0.05999116, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "mythomax-l2-13b-turbo", + "name": "MythoMax L2 13B Turbo (DeepInfra)", + "cost_per_1m_in": 0.06004024, + "cost_per_1m_out": 0.05999436, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nemotron-nano-9b-2", + "name": "Nemotron Nano 9B v2", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nemotron-nano-9b-v2", + "name": "Nemotron Nano 9B v2 (131K)", + "cost_per_1m_in": 0.04012759, + "cost_per_1m_out": 0.15999604, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nvidia-nemotron-nano-9b-v2", + "name": "Nemotron Nano 9B v2 (DeepInfra)", + "cost_per_1m_in": 0.04012759, + "cost_per_1m_out": 0.15999604, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nemotron-nano-9b-v2", + "name": "Nemotron Nano 9B v2 (Free)", + "cost_per_1m_in": 0.04012759, + "cost_per_1m_out": 0.15999604, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nemotron-4-340b-instruct", + "name": "Nemotron-4 340B Instruct (DeepInfra)", + "cost_per_1m_in": 1.00081699, + "cost_per_1m_out": 0.99995174, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "noromaid-20b", + "name": "Noromaid 20B (OpenRouter)", + "cost_per_1m_in": 1.00115817, + "cost_per_1m_out": 1.74985718, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-lite", + "name": "Nova Lite", + "cost_per_1m_in": 0.0602005, + "cost_per_1m_out": 0.23999529, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-lite-v1", + "name": "Nova Lite (Bedrock US)", + "cost_per_1m_in": 0.0602005, + "cost_per_1m_out": 0.23999599, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 300000, + "default_max_tokens": 10000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-lite-v1", + "name": "Nova Lite (EdenAI EU)", + "cost_per_1m_in": 0.0602005, + "cost_per_1m_out": 0.23999526, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-lite-v1", + "name": "Nova Lite (EdenAI)", + "cost_per_1m_in": 0.0602005, + "cost_per_1m_out": 0.23999526, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-lite-v1", + "name": "Nova Lite (OpenRouter)", + "cost_per_1m_in": 0.0602005, + "cost_per_1m_out": 0.23999548, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 300000, + "default_max_tokens": 5120, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-micro", + "name": "Nova Micro", + "cost_per_1m_in": 0.03512114, + "cost_per_1m_out": 0.14000343, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 
4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-micro-v1", + "name": "Nova Micro (Bedrock US)", + "cost_per_1m_in": 0.03512114, + "cost_per_1m_out": 0.14000391, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 10000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-micro-v1", + "name": "Nova Micro (Bedrock)", + "cost_per_1m_in": 0.03512114, + "cost_per_1m_out": 0.14000391, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 10000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-micro-v1", + "name": "Nova Micro (EdenAI Vision, 300K)", + "cost_per_1m_in": 0.03512114, + "cost_per_1m_out": 0.14000299, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 300000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-micro-v1", + "name": "Nova Micro (OpenRouter)", + "cost_per_1m_in": 0.03512114, + "cost_per_1m_out": 0.14000381, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 5120, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-premier", + "name": "Nova Premier", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 10000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-premier-v1", + "name": "Nova Premier", + "cost_per_1m_in": 2.50983478, + "cost_per_1m_out": 12.49891199, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-premier-v1", + "name": "Nova Premier (Multimodal)", + "cost_per_1m_in": 2.50983478, + "cost_per_1m_out": 12.49891199, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 10000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-pro", + "name": "Nova Pro", + "cost_per_1m_in": 0.80267335, + "cost_per_1m_out": 3.1999483, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 300000, + "default_max_tokens": 5120, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-pro-v1", + "name": "Nova Pro (Bedrock)", + "cost_per_1m_in": 0.80267335, + "cost_per_1m_out": 3.19994583, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-pro-v1", + "name": "Nova Pro (OpenRouter)", + "cost_per_1m_in": 0.80267335, + "cost_per_1m_out": 3.19993808, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 300000, + "default_max_tokens": 5120, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-pro-v1", + "name": "Nova Pro (Video)", + "cost_per_1m_in": 
0.80267335, + "cost_per_1m_out": 3.19994583, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 300000, + "default_max_tokens": 10000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "nova-reel", + "name": "Nova Reel", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-reel-v1", + "name": "Nova Reel v1 (Bedrock)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "nova-sonic-v1", + "name": "Nova Sonic v1 (Bedrock)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-deep-research-2025-06-26", + "name": "O3 Deep Research (200K)", + "cost_per_1m_in": 10, + "cost_per_1m_out": 40, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "olmo-2-0325-32b-instruct", + "name": "OLMo-2 32B Instruct (OpenRouter)", + "cost_per_1m_in": 0.20028571, + "cost_per_1m_out": 0.34998981, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "olmocr-2-7b-1025", + "name": "OlmOCR 2 7B (DeepInfra)", + "cost_per_1m_in": 0.09015397, + "cost_per_1m_out": 0.18997592, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "olmocr-7b-0825", + "name": "OlmOCR 7B (DeepInfra)", + "cost_per_1m_in": 0.09015397, + "cost_per_1m_out": 0.18997592, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "olmocr-7b-0725-fp8", + "name": "OlmOCR 7B (DeepInfra)", + "cost_per_1m_in": 0.09015397, + "cost_per_1m_out": 0.18997489, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "olmocr-7b-1025", + "name": "OlmOCR 7B (DeepInfra)", + "cost_per_1m_in": 0.09015397, + "cost_per_1m_out": 0.18997592, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "olmocr-7b", + "name": "OlmoCR 7B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": 
false, + "supports_attachments": false + }, + { + "id": "olympiccoder-32b", + "name": "OlympicCoder 32B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "omni-moderation", + "name": "Omni Moderation", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "open-codestral-mamba", + "name": "Open Codestral Mamba (EdenAI)", + "cost_per_1m_in": 0.25, + "cost_per_1m_out": 0.25, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 256000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "omni-moderation-latest", + "name": "OpenAI Omni Moderation", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "omni-moderation-2024-09-26", + "name": "OpenAI Omni Moderation (2024-09-26)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "openhands-lm-32b-v0-1", + "name": "OpenHands LM 32B v0.1 (OpenRouter)", + "cost_per_1m_in": 2.60278689, + "cost_per_1m_out": 3.39989142, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "palm-2-bison", + "name": "PaLM 2 Bison", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 9216, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "palm-2-chat-bison-32k", + "name": "PaLM 2 Chat Bison (32K)", + "cost_per_1m_in": 1.00163532, + "cost_per_1m_out": 1.99978634, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "palm-2-chat-bison", + "name": "PaLM 2 Chat Bison (OpenRouter)", + "cost_per_1m_in": 1.00163532, + "cost_per_1m_out": 1.9998963, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 9216, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "palm-2-codechat-bison", + "name": "PaLM 2 Code Chat", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 7168, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "palm-2-codechat-bison-32k", + "name": "PaLM 2 Code Chat (32K)", + "cost_per_1m_in": 1.00163532, + "cost_per_1m_out": 
1.99992644, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "palm-2-codechat-bison", + "name": "PaLM 2 Code Chat (OpenRouter)", + "cost_per_1m_in": 1.00163532, + "cost_per_1m_out": 1.99990226, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 7168, + "default_max_tokens": 1024, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "paddleocr-vl-0-9b", + "name": "PaddleOCR-VL 0.9B (DeepInfra)", + "cost_per_1m_in": 0.14058565, + "cost_per_1m_out": 0.7994594, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "pegasus-1-2-v1", + "name": "Pegasus 1.2 (Video)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 7.5, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-3-medium-128k-instruct", + "name": "Phi-3 Medium 128K Instruct (OpenRouter)", + "cost_per_1m_in": 1.00067705, + "cost_per_1m_out": 0.99997878, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-3-medium-instruct", + "name": "Phi-3 Medium Instruct", + "cost_per_1m_in": 0.03004082, + "cost_per_1m_out": 1.0294739, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-3-mini-128k-instruct", + "name": "Phi-3 Mini 128K Instruct (OpenRouter)", + "cost_per_1m_in": 0.1000677, + "cost_per_1m_out": 0.09999683, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-3-5-mini-128k-instruct", + "name": "Phi-3.5 Mini (128K)", + "cost_per_1m_in": 0.1000677, + "cost_per_1m_out": 0.09999851, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-4", + "name": "Phi-4", + "cost_per_1m_in": 0.07011457, + "cost_per_1m_out": 0.13999593, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-4", + "name": "Phi-4 (DeepInfra)", + "cost_per_1m_in": 0.07011457, + "cost_per_1m_out": 0.13999579, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-4", + "name": "Phi-4 (OpenRouter)", + "cost_per_1m_in": 0.06011457, + "cost_per_1m_out": 0.1399959, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + 
"default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-4-multimodal-instruct", + "name": "Phi-4 Multimodal (DeepInfra)", + "cost_per_1m_in": 0.04010578, + "cost_per_1m_out": 0.12999725, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "phi-4-multimodal-instruct", + "name": "Phi-4 Multimodal (OpenRouter)", + "cost_per_1m_in": 0.05008137, + "cost_per_1m_out": 0.09999788, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "phi-4-multimodal-instruct", + "name": "Phi-4 Multimodal Instruct", + "cost_per_1m_in": 0.0500823, + "cost_per_1m_out": 0.09999719, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "phi-4-reasoning", + "name": "Phi-4 Reasoning (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-4-reasoning-plus", + "name": "Phi-4 Reasoning Plus", + "cost_per_1m_in": 0.07024289, + "cost_per_1m_out": 0.34994049, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-4-reasoning-plus", + "name": "Phi-4 Reasoning Plus (DeepInfra)", + "cost_per_1m_in": 0.07011457, + "cost_per_1m_out": 0.13999633, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phi-4-reasoning-plus", + "name": "Phi-4 Reasoning Plus (OpenRouter)", + "cost_per_1m_in": 0.07028642, + "cost_per_1m_out": 0.34999064, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "phind-codellama-34b-v2", + "name": "Phind CodeLlama 34B v2 (DeepInfra)", + "cost_per_1m_in": 0.40032653, + "cost_per_1m_out": 0.39999053, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "pixtral-12b", + "name": "Pixtral 12B (OpenRouter)", + "cost_per_1m_in": 0.10008026, + "cost_per_1m_out": 0.09999732, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "pixtral-large", + "name": "Pixtral Large", + "cost_per_1m_in": 2.00426136, + "cost_per_1m_out": 5.99990625, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + 
"can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "pixtral-large-2411", + "name": "Pixtral Large (OpenRouter)", + "cost_per_1m_in": 2.00426439, + "cost_per_1m_out": 5.99988614, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "pixtral-large-2502-v1", + "name": "Pixtral Large (Text Only)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "pixtral-large-2502-v1", + "name": "Pixtral Large (Vision)", + "cost_per_1m_in": 2, + "cost_per_1m_out": 6, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "pixtral-large-latest", + "name": "Pixtral Large (Vision)", + "cost_per_1m_in": 2.00426439, + "cost_per_1m_out": 5.99990618, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "premium-coding", + "name": "Premium Coding", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99965052, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "premium", + "name": "Premium Pool", + "cost_per_1m_in": 3.01090116, + "cost_per_1m_out": 14.99966811, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwq-32b", + "name": "QwQ 32B", + "cost_per_1m_in": 0.07512653, + "cost_per_1m_out": 0.15000133, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwq-32b-arliai-rpr", + "name": "QwQ 32B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwq-32b", + "name": "QwQ 32B (OpenRouter)", + "cost_per_1m_in": 0.78608163, + "cost_per_1m_out": 0.38833984, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwq-32b-arliai-rpr-v1", + "name": "QwQ-32B ArliAI RpR v1 (OpenRouter Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwq-32b-arliai-rpr-v1", + "name": "QwQ-32B ArliAI RpR v1 (OpenRouter)", + "cost_per_1m_in": 0.03, + 
"cost_per_1m_out": 0.11, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-large", + "name": "Qwen 2 Large", + "cost_per_1m_in": 0.9007359, + "cost_per_1m_out": 0.89997209, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-small", + "name": "Qwen 2 Small", + "cost_per_1m_in": 0.04008177, + "cost_per_1m_out": 0.09999779, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-72b-instruct", + "name": "Qwen 2.5 72B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-72b-instruct", + "name": "Qwen 2.5 72B Instruct (DeepInfra)", + "cost_per_1m_in": 0.12031889, + "cost_per_1m_out": 0.38999139, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-72b-instruct", + "name": "Qwen 2.5 72B Instruct (Free)", + "cost_per_1m_in": 0.07021259, + "cost_per_1m_out": 0.25999426, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-72b-instruct", + "name": "Qwen 2.5 72B Instruct (OpenRouter)", + "cost_per_1m_in": 0.07021259, + "cost_per_1m_out": 0.25999426, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-7b-instruct", + "name": "Qwen 2.5 7B Instruct", + "cost_per_1m_in": 0.04008177, + "cost_per_1m_out": 0.09999766, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-7b-instruct", + "name": "Qwen 2.5 7B Instruct (DeepInfra)", + "cost_per_1m_in": 0.06019624, + "cost_per_1m_out": 0.2399947, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-7b-instruct", + "name": "Qwen 2.5 7B Instruct (OpenRouter)", + "cost_per_1m_in": 0.04008039, + "cost_per_1m_out": 0.09999745, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-7b-instruct-turbo", + "name": "Qwen 2.5 7B Instruct Turbo (Together)", + "cost_per_1m_in": 0.30024116, + "cost_per_1m_out": 0.29998679, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-coder-32b-instruct", + "name": "Qwen 2.5 Coder 32B (DeepInfra)", + "cost_per_1m_in": 0.10022895, + "cost_per_1m_out": 0.27999382, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-coder-32b-instruct", + "name": "Qwen 2.5 Coder 32B (Free)", + "cost_per_1m_in": 0.04013083, + "cost_per_1m_out": 0.15999647, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-coder-32b-instruct", + "name": "Qwen 2.5 Coder 32B (OpenRouter)", + "cost_per_1m_in": 0.04013083, + "cost_per_1m_out": 0.15999647, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-coder-32b-instruct", + "name": "Qwen 2.5 Coder 32B (Together)", + "cost_per_1m_in": 0.80064309, + "cost_per_1m_out": 0.79996381, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-coder-32b-instruct", + "name": "Qwen 2.5 Coder 32B Instruct", + "cost_per_1m_in": 0.06012265, + "cost_per_1m_out": 0.1499963, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-coder-7b", + "name": "Qwen 2.5 Coder 7B (DeepInfra)", + "cost_per_1m_in": 0.04008177, + "cost_per_1m_out": 0.09999779, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-coder-7b-instruct", + "name": "Qwen 2.5 Coder 7B (OpenRouter)", + "cost_per_1m_in": 0.03007235, + "cost_per_1m_out": 0.08999636, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-large", + "name": "Qwen 2.5 Large", + "cost_per_1m_in": 0.60048622, + "cost_per_1m_out": 0.59994721, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-small", + "name": "Qwen 2.5 Small", + "cost_per_1m_in": 0.04008177, + "cost_per_1m_out": 0.09999767, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-vl-7b-instruct", + "name": "Qwen 2.5 VL 7B (Vision)", + "cost_per_1m_in": 0.20016207, + "cost_per_1m_out": 0.19997162, 
+ "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-235b-a22b-thinking", + "name": "Qwen 3 235B A22B Thinking", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b-instruct", + "name": "Qwen 3 235B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-32b-v1", + "name": "Qwen 3 32B (Bedrock)", + "cost_per_1m_in": 0.150489, + "cost_per_1m_out": 0.59998484, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-32b-fp8", + "name": "Qwen 3 32B (Together, FP8)", + "cost_per_1m_in": 8.25919869, + "cost_per_1m_out": 9.87800164, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-8b", + "name": "Qwen 3 8B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 20000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-8b", + "name": "Qwen 3 8B (OpenRouter Free, 40K)", + "cost_per_1m_in": 0.03511438, + "cost_per_1m_out": 0.1379968, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-8b", + "name": "Qwen 3 8B (OpenRouter, 128K)", + "cost_per_1m_in": 0.03511438, + "cost_per_1m_out": 0.1379968, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 20000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder", + "name": "Qwen 3 Coder", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder", + "name": "Qwen 3 Coder (Exacto)", + "cost_per_1m_in": 0.22077678, + "cost_per_1m_out": 0.94996982, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder", + "name": "Qwen 3 Coder (Free)", + "cost_per_1m_in": 0.22077678, + "cost_per_1m_out": 0.94996982, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262000, + "default_max_tokens": 262000, + "can_reason": true, + "has_reasoning_efforts": false, + 
"supports_attachments": false + }, + { + "id": "qwen-3-coder", + "name": "Qwen 3 Coder (OpenRouter)", + "cost_per_1m_in": 0.22077678, + "cost_per_1m_out": 0.94996982, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-30b-a3b-v1", + "name": "Qwen 3 Coder 30B (Bedrock)", + "cost_per_1m_in": 0.1504906, + "cost_per_1m_out": 0.59998675, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-480b-a35b-instruct-fp8", + "name": "Qwen 3 Coder 480B (Together)", + "cost_per_1m_in": 2.00163532, + "cost_per_1m_out": 1.99995078, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-flash", + "name": "Qwen 3 Coder Flash (OpenRouter)", + "cost_per_1m_in": 0.30122649, + "cost_per_1m_out": 1.49996688, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-plus", + "name": "Qwen 3 Coder Plus (OpenRouter)", + "cost_per_1m_in": 1.00408831, + "cost_per_1m_out": 4.99983351, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-1-7b", + "name": "Qwen 3.1 7B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-max", + "name": "Qwen Max (OpenRouter)", + "cost_per_1m_in": 1.60523303, + "cost_per_1m_out": 6.39985871, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-plus-2025-07-28", + "name": "Qwen Plus 0728 (1M)", + "cost_per_1m_in": 0.40098119, + "cost_per_1m_out": 1.19997351, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-plus-2025-07-28", + "name": "Qwen Plus 0728 Thinking (1M)", + "cost_per_1m_in": 0.40098119, + "cost_per_1m_out": 1.19997351, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-turbo", + "name": "Qwen Turbo (1M)", + "cost_per_1m_in": 0.050163, + "cost_per_1m_out": 0.19999495, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 1000000, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-vl-max", + 
"name": "Qwen VL Max (OpenRouter)", + "cost_per_1m_in": 0.80261652, + "cost_per_1m_out": 3.19993197, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-vl-plus", + "name": "Qwen VL Plus (OpenRouter)", + "cost_per_1m_in": 0.21051513, + "cost_per_1m_out": 0.62998543, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 7500, + "default_max_tokens": 1500, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-plus", + "name": "Qwen-Plus", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-plus", + "name": "Qwen-Plus (OpenRouter)", + "cost_per_1m_in": 0.40098119, + "cost_per_1m_out": 1.19997351, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-72b-instruct", + "name": "Qwen2 72B Instruct", + "cost_per_1m_in": 0.9007359, + "cost_per_1m_out": 0.36892013, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-72b-instruct", + "name": "Qwen2 72B Instruct (DeepInfra)", + "cost_per_1m_in": 0.12031889, + "cost_per_1m_out": 0.38999139, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-72b-instruct", + "name": "Qwen2 72B Instruct (OpenRouter)", + "cost_per_1m_in": 0.9007359, + "cost_per_1m_out": 0.89996127, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-vl-3b-instruct", + "name": "Qwen2.5 VL 3B Instruct (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 64000, + "default_max_tokens": 64000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-2-5-vl-32b-instruct", + "name": "Qwen2.5-VL 32B (DeepInfra, 128K)", + "cost_per_1m_in": 0.20048622, + "cost_per_1m_out": 0.59998152, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-2-5-vl-32b-instruct", + "name": "Qwen2.5-VL 32B (OpenRouter)", + "cost_per_1m_in": 0.05017828, + "cost_per_1m_out": 0.21999323, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-2-5-vl-32b-instruct", + "name": "Qwen2.5-VL 32B (OpenRouter, Free)", + "cost_per_1m_in": 0.05017828, + 
"cost_per_1m_out": 0.21999323, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-2-5-vl-32b-instruct", + "name": "Qwen2.5-VL 32B Instruct", + "cost_per_1m_in": 0.20048622, + "cost_per_1m_out": 0.59998152, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16384, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-vl-72b-instruct", + "name": "Qwen2.5-VL 72B", + "cost_per_1m_in": 0.60097245, + "cost_per_1m_out": 0.70655716, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-2-5-vl-72b-instruct", + "name": "Qwen2.5-VL 72B (Free, 131K)", + "cost_per_1m_in": 0.08026742, + "cost_per_1m_out": 0.32998329, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-2-5-vl-72b-instruct", + "name": "Qwen2.5-VL 72B (OpenRouter)", + "cost_per_1m_in": 0.08026742, + "cost_per_1m_out": 0.32998329, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-0-6b-04-28", + "name": "Qwen3 0.6B (32K)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-14b", + "name": "Qwen3 14B", + "cost_per_1m_in": 0.06019624, + "cost_per_1m_out": 0.23999451, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-14b", + "name": "Qwen3 14B (DeepInfra)", + "cost_per_1m_in": 0.06019624, + "cost_per_1m_out": 0.2399947, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-14b", + "name": "Qwen3 14B (Free)", + "cost_per_1m_in": 0.05017974, + "cost_per_1m_out": 0.21999497, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-14b", + "name": "Qwen3 14B (OpenRouter)", + "cost_per_1m_in": 0.05017974, + "cost_per_1m_out": 0.21999497, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b-2507", + "name": "Qwen3 235B A22B (OpenRouter)", + "cost_per_1m_in": 0.08044971, + "cost_per_1m_out": 0.54998786, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + 
"default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b-07-25", + "name": "Qwen3 235B A22B (OpenRouter)", + "cost_per_1m_in": 0.12048242, + "cost_per_1m_out": 0.58998697, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b-instruct-2507", + "name": "Qwen3 235B A22B Instruct (DeepInfra)", + "cost_per_1m_in": 0.09046607, + "cost_per_1m_out": 0.56998742, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b-thinking-2507", + "name": "Qwen3 235B Thinking (DeepInfra)", + "cost_per_1m_in": 0.30237122, + "cost_per_1m_out": 2.89993598, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b-thinking-2507", + "name": "Qwen3 235B Thinking (OpenRouter)", + "cost_per_1m_in": 0.1104902, + "cost_per_1m_out": 0.59998627, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b", + "name": "Qwen3 235B-A22B", + "cost_per_1m_in": 0.1304906, + "cost_per_1m_out": 0.59998626, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b", + "name": "Qwen3 235B-A22B (DeepInfra)", + "cost_per_1m_in": 0.09046607, + "cost_per_1m_out": 0.56998742, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b", + "name": "Qwen3 235B-A22B (Free)", + "cost_per_1m_in": 0.18044118, + "cost_per_1m_out": 0.53998765, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-235b-a22b", + "name": "Qwen3 235B-A22B (OpenRouter)", + "cost_per_1m_in": 0.18044118, + "cost_per_1m_out": 0.53998765, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-30b-a3b", + "name": "Qwen3 30B A3B", + "cost_per_1m_in": 0.08023693, + "cost_per_1m_out": 0.2899936, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-30b-a3b", + "name": "Qwen3 30B A3B (DeepInfra)", + "cost_per_1m_in": 0.08023712, + "cost_per_1m_out": 0.2899936, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + 
"default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-30b-a3b", + "name": "Qwen3 30B A3B (OpenRouter Free)", + "cost_per_1m_in": 0.15843521, + "cost_per_1m_out": 0.21724381, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-30b-a3b", + "name": "Qwen3 30B A3B (OpenRouter)", + "cost_per_1m_in": 0.15843521, + "cost_per_1m_out": 0.21724381, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-30b-a3b-instruct-2507", + "name": "Qwen3 30B A3B Instruct (262K)", + "cost_per_1m_in": 0.08026983, + "cost_per_1m_out": 0.32999271, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-30b-a3b-thinking-2507", + "name": "Qwen3 30B A3B Thinking (OpenRouter)", + "cost_per_1m_in": 0.39225306, + "cost_per_1m_out": 0.28094466, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-4b", + "name": "Qwen3 4B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-4b", + "name": "Qwen3 4B (OpenRouter Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "qwen-3-4b", + "name": "Qwen3 4B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-30b-a3b-instruct", + "name": "Qwen3 Coder 30B A3B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-30b-a3b-instruct", + "name": "Qwen3 Coder 30B A3B (DeepInfra)", + "cost_per_1m_in": 0.07021259, + "cost_per_1m_out": 0.25999426, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-30b-a3b-instruct", + "name": "Qwen3 Coder 30B A3B (OpenRouter)", + "cost_per_1m_in": 0.2022731, + "cost_per_1m_out": 0.24615863, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + 
"supports_attachments": false + }, + { + "id": "qwen-3-coder-480b-a35b-instruct", + "name": "Qwen3 Coder 480B A35B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-480b-a35b-instruct", + "name": "Qwen3 Coder 480B A35B (DeepInfra)", + "cost_per_1m_in": 0.40130826, + "cost_per_1m_out": 1.59995471, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-coder-480b-a35b-instruct-turbo", + "name": "Qwen3 Coder 480B Instruct Turbo (262K)", + "cost_per_1m_in": 0.29098119, + "cost_per_1m_out": 1.19996582, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-32b", + "name": "Qwen3-32B", + "cost_per_1m_in": 0.02702614, + "cost_per_1m_out": 0.30197029, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-32b", + "name": "Qwen3-32B (DeepInfra)", + "cost_per_1m_in": 0.10022895, + "cost_per_1m_out": 0.27999382, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-32b", + "name": "Qwen3-32B (OpenRouter)", + "cost_per_1m_in": 0.23609975, + "cost_per_1m_out": 0.19478921, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 40960, + "default_max_tokens": 40960, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-max", + "name": "Qwen3-Max (OpenRouter)", + "cost_per_1m_in": 1.20490597, + "cost_per_1m_out": 5.99986754, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-next-80b-a3b-instruct", + "name": "Qwen3-Next 80B A3B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-next-80b-a3b-instruct", + "name": "Qwen3-Next 80B A3B Instruct (DeepInfra)", + "cost_per_1m_in": 0.14089943, + "cost_per_1m_out": 1.09997572, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-next-80b-a3b-instruct", + "name": "Qwen3-Next 80B A3B Instruct (OpenRouter)", + "cost_per_1m_in": 0.10065413, + "cost_per_1m_out": 0.79998234, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + 
"supports_attachments": false + }, + { + "id": "qwen-3-next-80b-a3b-thinking", + "name": "Qwen3-Next 80B A3B Thinking", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-next-80b-a3b-thinking", + "name": "Qwen3-Next 80B A3B Thinking (DeepInfra)", + "cost_per_1m_in": 0.14, + "cost_per_1m_out": 1.2, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-next-80b-a3b-thinking", + "name": "Qwen3-Next 80B A3B Thinking (OpenRouter)", + "cost_per_1m_in": 0.15097959, + "cost_per_1m_out": 1.19997159, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-vl-235b-a22b-instruct", + "name": "Qwen3-VL 235B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-vl-235b-a22b-instruct", + "name": "Qwen3-VL 235B Instruct (DeepInfra)", + "cost_per_1m_in": 0.30121832, + "cost_per_1m_out": 1.48996711, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-235b-a22b-instruct", + "name": "Qwen3-VL 235B Instruct (OpenRouter)", + "cost_per_1m_in": 0.30098119, + "cost_per_1m_out": 1.19997351, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-30b-a3b-instruct", + "name": "Qwen3-VL 30B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-vl-30b-a3b-instruct", + "name": "Qwen3-VL 30B Instruct (DeepInfra, 262K)", + "cost_per_1m_in": 0.29080948, + "cost_per_1m_out": 0.98997814, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-30b-a3b-instruct", + "name": "Qwen3-VL 30B Instruct (OpenRouter)", + "cost_per_1m_in": 0.1504906, + "cost_per_1m_out": 0.59999364, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-30b-a3b-thinking", + "name": "Qwen3-VL 30B Thinking", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-vl-30b-a3b-thinking", + "name": "Qwen3-VL 30B Thinking (DeepInfra, 262K)", + "cost_per_1m_in": 0.29, + "cost_per_1m_out": 0.9900000000000001, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-30b-a3b-thinking", + "name": "Qwen3-VL 30B Thinking (OpenRouter, 131K)", + "cost_per_1m_in": 1.16571429, + "cost_per_1m_out": 0.9811916, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-32b-instruct", + "name": "Qwen3-VL 32B (OpenRouter)", + "cost_per_1m_in": 0.35089943, + "cost_per_1m_out": 1.09997572, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-4b-instruct", + "name": "Qwen3-VL 4B (DeepInfra)", + "cost_per_1m_in": 0.1004906, + "cost_per_1m_out": 0.59998675, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-4b-thinking", + "name": "Qwen3-VL 4B Thinking (DeepInfra)", + "cost_per_1m_in": 0.09999999999999999, + "cost_per_1m_out": 1, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-8b-instruct", + "name": "Qwen3-VL 8B (DeepInfra, 262K)", + "cost_per_1m_in": 0.18056419, + "cost_per_1m_out": 0.68998477, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-8b-instruct", + "name": "Qwen3-VL 8B (OpenRouter)", + "cost_per_1m_in": 0.08040883, + "cost_per_1m_out": 0.49448078, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-8b-instruct", + "name": "Qwen3-VL 8B Instruct", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-vl-8b-thinking", + "name": "Qwen3-VL 8B Thinking (DeepInfra)", + "cost_per_1m_in": 0.18, + "cost_per_1m_out": 2.09, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-235b-a22b-thinking", + "name": "Qwen3-VL-235B Thinking", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + 
"has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "qwen-3-vl-235b-a22b-thinking", + "name": "Qwen3-VL-235B Thinking (DeepInfra)", + "cost_per_1m_in": 0.44999999999999996, + "cost_per_1m_out": 3.49, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwen-3-vl-235b-a22b-thinking", + "name": "Qwen3-VL-235B Thinking (OpenRouter)", + "cost_per_1m_in": 0.78372854, + "cost_per_1m_out": 1.19288995, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 262144, + "default_max_tokens": 262144, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "qwerky-72b", + "name": "Qwerky 72B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "remm-slerp-l2-13b", + "name": "ReMM SLERP L2 13B", + "cost_per_1m_in": 0.45043018, + "cost_per_1m_out": 0.64990017, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 6144, + "default_max_tokens": 6144, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "reasoners", + "name": "Reasoners", + "cost_per_1m_in": 0.04003273, + "cost_per_1m_out": 60.37399918, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "refuel-llm-v2-small", + "name": "Refuel LLM V2 Small (Together)", + "cost_per_1m_in": 0.20016327, + "cost_per_1m_out": 0.19998422, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "reka-flash-3", + "name": "Reka Flash 3 (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "relace-apply-3", + "name": "Relace Apply 3 (256K)", + "cost_per_1m_in": 0.85, + "cost_per_1m_out": 1.25, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 256000, + "default_max_tokens": 128000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ring-1t", + "name": "Ring-1T (OpenRouter)", + "cost_per_1m_in": 0.5700000000000001, + "cost_per_1m_out": 2.2800000000000002, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "rocinante-12b", + "name": "Rocinante 12B (OpenRouter)", + "cost_per_1m_in": 0.17033992, + "cost_per_1m_out": 0.42997508, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "seed-1-6-250915", + "name": 
"SEED 1.6 (ByteDance)", + "cost_per_1m_in": 0.25, + "cost_per_1m_out": 2, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 224000, + "default_max_tokens": 256000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "sarvam-m", + "name": "Sarvam-M (32K)", + "cost_per_1m_in": 0.25058685, + "cost_per_1m_out": 0.74995387, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "seedream-3-0-t2i-250415", + "name": "SeeDream 3.0 T2I (EdenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "seedream-4-0-250828", + "name": "SeeDream 4.0 (EdenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "seed-oss-36b-instruct", + "name": "Seed-OSS 36B Instruct (131K)", + "cost_per_1m_in": 0.16, + "cost_per_1m_out": 0.65, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": false + }, + { + "id": "shisa-2-llama-3-3-70b", + "name": "Shisa 2 Llama 3.3 70B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "shisa-v2-llama-3-3-70b", + "name": "Shisa V2 Llama 3.3 70B", + "cost_per_1m_in": 0.049999999999999996, + "cost_per_1m_out": 0.22, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "shisa-v2-llama-3-3-70b", + "name": "Shisa V2 Llama 3.3 70B (Free)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "sky-t1-32b-preview", + "name": "Sky-T1 32B Preview (DeepInfra)", + "cost_per_1m_in": 0.10022895, + "cost_per_1m_out": 0.27999382, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "skyfall-36b-v2", + "name": "Skyfall 36B v2 (OpenRouter)", + "cost_per_1m_in": 0.08023239, + "cost_per_1m_out": 0.3299263, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "sonar", + "name": "Sonar (Perplexity)", + "cost_per_1m_in": 1.00082305, + "cost_per_1m_out": 0.9999735, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 
127072, + "default_max_tokens": 127072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "sonar-deep-research", + "name": "Sonar Deep Research (Perplexity)", + "cost_per_1m_in": 2.00661704, + "cost_per_1m_out": 7.99988751, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "sonar-pro", + "name": "Sonar Pro (Vision, 200K)", + "cost_per_1m_in": 3.01234568, + "cost_per_1m_out": 14.99971077, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "sonar-reasoning", + "name": "Sonar Reasoning (127K)", + "cost_per_1m_in": 1.00411523, + "cost_per_1m_out": 4.99992158, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 127000, + "default_max_tokens": 127000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "sonar-reasoning-pro", + "name": "Sonar Reasoning Pro (Perplexity)", + "cost_per_1m_in": 2.00661704, + "cost_per_1m_out": 7.99988751, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 128000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "sora-2", + "name": "Sora 2 (OpenAI)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "sora-2-pro", + "name": "Sora 2 Pro", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "sorcererlm-8x22b", + "name": "SorcererLM 8x22B (16K)", + "cost_per_1m_in": 4.50312067, + "cost_per_1m_out": 4.49970887, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 16000, + "default_max_tokens": 16000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "spotlight", + "name": "Spotlight Vision (131K)", + "cost_per_1m_in": 0.18014587, + "cost_per_1m_out": 0.17998661, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 65537, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "step-3", + "name": "Step-3 (OpenRouter)", + "cost_per_1m_in": 1.96790336, + "cost_per_1m_out": 1.39255301, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "router", + "name": "Switchpoint Router (OpenRouter)", + "cost_per_1m_in": 0.85, + "cost_per_1m_out": 3.4, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "text-embedding-004", + "name": "Text 
Embedding 004 (Google)", + "cost_per_1m_in": 0.09999999999999999, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2048, + "default_max_tokens": 512, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "text-multilingual-embedding-002", + "name": "Text Multilingual Embedding 002 (Google)", + "cost_per_1m_in": 0.09999999999999999, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 2048, + "default_max_tokens": 512, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "text-davinci-002", + "name": "Text-Davinci-002 (EdenAI)", + "cost_per_1m_in": 2, + "cost_per_1m_out": 2, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-text-express", + "name": "Titan Text Express", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-text-express-v1", + "name": "Titan Text Express v1 (Bedrock)", + "cost_per_1m_in": 1.30131477, + "cost_per_1m_out": 1.69983357, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 42000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-text-express-v1", + "name": "Titan Text Express v1 (EdenAI)", + "cost_per_1m_in": 1.3, + "cost_per_1m_out": 1.7, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-tg-1-large", + "name": "Titan Text Large", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-tg-1-large", + "name": "Titan Text Large (Bedrock)", + "cost_per_1m_in": 1.00116009, + "cost_per_1m_out": 1.49992135, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32000, + "default_max_tokens": 32000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-tg-1-large", + "name": "Titan Text Large (EdenAI)", + "cost_per_1m_in": 1.5999999999999999, + "cost_per_1m_out": 1.5999999999999999, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-text-lite", + "name": "Titan Text Lite", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-text-lite-v1", + "name": "Titan Text Lite v1 (Bedrock)", + "cost_per_1m_in": 0.30030936, + "cost_per_1m_out": 0.39996444, + "cost_per_1m_in_cached": 0, + 
"cost_per_1m_out_cached": 0, + "context_window": 42000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "titan-text-lite-v1", + "name": "Titan Text Lite v1 (EdenAI)", + "cost_per_1m_in": 0.3, + "cost_per_1m_out": 0.39999999999999997, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4000, + "default_max_tokens": 4000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "tongyi-deepresearch-30b-a3b", + "name": "Tongyi DeepResearch 30B", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "tongyi-deepresearch-30b-a3b", + "name": "Tongyi DeepResearch 30B (Free)", + "cost_per_1m_in": 0.09032706, + "cost_per_1m_out": 0.39999117, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "tongyi-deepresearch-30b-a3b", + "name": "Tongyi DeepResearch 30B (OpenRouter)", + "cost_per_1m_in": 0.09032706, + "cost_per_1m_out": 0.39999117, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 131072, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "ui-tars-1-5-7b", + "name": "UI-TARS 1.5 7B (OpenRouter)", + "cost_per_1m_in": 0.10016207, + "cost_per_1m_out": 0.19999384, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "ui-tars-72b", + "name": "UI-TARS 72B (OpenRouter)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "ultra-fast", + "name": "Ultra Fast", + "cost_per_1m_in": 0.00301127, + "cost_per_1m_out": 1.52519943, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 8000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "unslopnemo-12b", + "name": "UnslopNemo 12B (OpenRouter)", + "cost_per_1m_in": 0.40032, + "cost_per_1m_out": 0.399984, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "virtuoso-large", + "name": "Virtuoso Large (OpenRouter)", + "cost_per_1m_in": 0.75096463, + "cost_per_1m_out": 1.199951, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 131072, + "default_max_tokens": 64000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "visionx", + "name": "VisionX", + "cost_per_1m_in": 0.0602005, + "cost_per_1m_out": 12.57960958, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 1500, + "can_reason": false, 
+ "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "wan-2-1-t2v-1-3b", + "name": "Wan 2.1 T2V 1.3B (DeepInfra)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 0, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "weaver", + "name": "Weaver (8K)", + "cost_per_1m_in": 1.12572343, + "cost_per_1m_out": 1.12487771, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8000, + "default_max_tokens": 2000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "wizardlm-2-8x22b", + "name": "WizardLM-2 8x22B", + "cost_per_1m_in": 0.48034067, + "cost_per_1m_out": 0.47998092, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "wizardlm-2-8x22b", + "name": "WizardLM-2 8x22B (DeepInfra)", + "cost_per_1m_in": 0.48034067, + "cost_per_1m_out": 0.47999097, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "wizardlm-2-8x22b", + "name": "WizardLM-2 8x22B (OpenRouter)", + "cost_per_1m_in": 0.4803331, + "cost_per_1m_out": 0.47998135, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 16384, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "wizardlm-2-8x22b", + "name": "WizardLM-2 8x22B (Together)", + "cost_per_1m_in": 1.20084926, + "cost_per_1m_out": 1.19997137, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 65536, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "yi-large", + "name": "Yi Large (OpenRouter)", + "cost_per_1m_in": 3.00215827, + "cost_per_1m_out": 2.99989435, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 32768, + "default_max_tokens": 4096, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "zephyr-7b-beta", + "name": "Zephyr 7B Beta", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 4096, + "default_max_tokens": 2048, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "j2-ultra-v1", + "name": "j2-ultra-v1", + "cost_per_1m_in": 18.86184211, + "cost_per_1m_out": 18.50625, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 8192, + "default_max_tokens": 8192, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1", + "name": "o1", + "cost_per_1m_in": 15.04761905, + "cost_per_1m_out": 59.99880952, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-2024-12-17", + "name": "o1 (EdenAI)", + "cost_per_1m_in": 52.85069729, + "cost_per_1m_out": 59.05373257, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1", + "name": "o1 (EdenAI)", + "cost_per_1m_in": 63.87612797, + "cost_per_1m_out": 58.7780968, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1-2024-12-17", + "name": "o1 (OpenAI)", + "cost_per_1m_in": 52.1123872, + "cost_per_1m_out": 59.07219032, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1", + "name": "o1 (OpenAI)", + "cost_per_1m_in": 67.32157506, + "cost_per_1m_out": 58.69196062, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1", + "name": "o1 (OpenRouter)", + "cost_per_1m_in": 59.34782609, + "cost_per_1m_out": 58.83662576, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1-mini", + "name": "o1-mini", + "cost_per_1m_in": 3.00952381, + "cost_per_1m_out": 11.99975238, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-mini", + "name": "o1-mini (EdenAI Azure)", + "cost_per_1m_in": 2.92634921, + "cost_per_1m_out": 4.35251492, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-mini", + "name": "o1-mini (EdenAI Vision)", + "cost_per_1m_in": 2.92634921, + "cost_per_1m_out": 4.35251492, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1-mini-2024-09-12", + "name": "o1-mini (EdenAI)", + "cost_per_1m_in": 8.01904762, + "cost_per_1m_out": 11.86950476, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-mini", + "name": "o1-mini (OpenAI Vision)", + "cost_per_1m_in": 2.7831746, + "cost_per_1m_out": 4.35623746, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1-mini", + "name": "o1-mini (OpenRouter)", + "cost_per_1m_in": 1.95206349, + "cost_per_1m_out": 4.37784635, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": 
"o1-mini-2024-09-12", + "name": "o1-mini (OpenRouter)", + "cost_per_1m_in": 4.58507937, + "cost_per_1m_out": 4.30938794, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-mini-2024-09-12", + "name": "o1-mini (Vision)", + "cost_per_1m_in": 9.65714286, + "cost_per_1m_out": 11.82691429, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 65536, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1-preview", + "name": "o1-preview (OpenRouter)", + "cost_per_1m_in": 15.04761905, + "cost_per_1m_out": 59.9987619, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-preview-2024-09-12", + "name": "o1-preview (OpenRouter)", + "cost_per_1m_in": 15.04761905, + "cost_per_1m_out": 59.9987619, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-preview", + "name": "o1-preview Vision (EdenAI)", + "cost_per_1m_in": 15.04761905, + "cost_per_1m_out": 59.9987619, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 128000, + "default_max_tokens": 32768, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-pro", + "name": "o1-pro", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o1-pro-2025-03-19", + "name": "o1-pro (OpenAI)", + "cost_per_1m_in": 150, + "cost_per_1m_out": 600, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1-pro", + "name": "o1-pro (OpenAI)", + "cost_per_1m_in": 150, + "cost_per_1m_out": 600, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o1-pro", + "name": "o1-pro (OpenRouter)", + "cost_per_1m_in": 502.42001641, + "cost_per_1m_out": 590.84147567, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3", + "name": "o3", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-2025-04-16", + "name": "o3 (OpenAI)", + "cost_per_1m_in": 4.15914684, + "cost_per_1m_out": 7.94602133, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": 
true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3", + "name": "o3 (OpenAI)", + "cost_per_1m_in": 3.77194422, + "cost_per_1m_out": 7.95570139, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3", + "name": "o3 (OpenRouter)", + "cost_per_1m_in": 3.31911403, + "cost_per_1m_out": 7.96561225, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3-deep-research", + "name": "o3 Deep Research", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-deep-research", + "name": "o3 Deep Research (OpenAI)", + "cost_per_1m_in": 10, + "cost_per_1m_out": 40, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3-deep-research", + "name": "o3 Deep Research (OpenRouter)", + "cost_per_1m_in": 10, + "cost_per_1m_out": 40, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3-pro-2025-06-10", + "name": "o3 Pro (200K)", + "cost_per_1m_in": 20, + "cost_per_1m_out": 80, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3-mini", + "name": "o3-mini", + "cost_per_1m_in": 2.38498769, + "cost_per_1m_out": 4.36787531, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-mini-2025-01-31", + "name": "o3-mini (EdenAI Vision)", + "cost_per_1m_in": 2.81812961, + "cost_per_1m_out": 4.35704676, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-mini", + "name": "o3-mini (EdenAI/Microsoft, Vision)", + "cost_per_1m_in": 2.73150123, + "cost_per_1m_out": 4.35921247, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-mini", + "name": "o3-mini (EdenAI/OpenAI, Vision)", + "cost_per_1m_in": 2.73150123, + "cost_per_1m_out": 4.35921247, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-mini-high", + "name": "o3-mini (High Reasoning)", + "cost_per_1m_in": 3.49310911, + "cost_per_1m_out": 4.33621777, + 
"cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3-mini-2025-01-31", + "name": "o3-mini (OpenAI)", + "cost_per_1m_in": 4.15365053, + "cost_per_1m_out": 4.32365874, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-mini", + "name": "o3-mini (OpenRouter, File Support)", + "cost_per_1m_in": 3.48949959, + "cost_per_1m_out": 4.33725054, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o3-mini", + "name": "o3-mini (Pool)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-pro", + "name": "o3-pro", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o3-pro", + "name": "o3-pro (OpenAI)", + "cost_per_1m_in": 20, + "cost_per_1m_out": 80, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o4-mini", + "name": "o4-mini", + "cost_per_1m_in": 2.22977851, + "cost_per_1m_out": 4.37175554, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o4-mini", + "name": "o4-mini (EdenAI)", + "cost_per_1m_in": 2.68096801, + "cost_per_1m_out": 4.3604758, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o4-mini-2025-04-16", + "name": "o4-mini (EdenAI, Image Output)", + "cost_per_1m_in": 1.93740771, + "cost_per_1m_out": 4.37906481, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o4-mini-high", + "name": "o4-mini (High Reasoning)", + "cost_per_1m_in": 3.12132896, + "cost_per_1m_out": 4.34736123, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": true, + "supports_attachments": true + }, + { + "id": "o4-mini-2025-04-16", + "name": "o4-mini (OpenAI)", + "cost_per_1m_in": 1.97711239, + "cost_per_1m_out": 4.37807219, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o4-mini", 
+ "name": "o4-mini (OpenRouter)", + "cost_per_1m_in": 2.18646432, + "cost_per_1m_out": 4.37170666, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o4-mini", + "name": "o4-mini (Pool)", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o4-mini-deep-research", + "name": "o4-mini Deep Research", + "cost_per_1m_in": 0, + "cost_per_1m_out": 0, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": false + }, + { + "id": "o4-mini-deep-research-2025-06-26", + "name": "o4-mini Deep Research (200K)", + "cost_per_1m_in": 2, + "cost_per_1m_out": 8, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o4-mini-deep-research", + "name": "o4-mini Deep Research (OpenAI)", + "cost_per_1m_in": 2, + "cost_per_1m_out": 8, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": false, + "has_reasoning_efforts": false, + "supports_attachments": true + }, + { + "id": "o4-mini-deep-research", + "name": "o4-mini Deep Research (OpenRouter)", + "cost_per_1m_in": 8.28209192, + "cost_per_1m_out": -11.39471235, + "cost_per_1m_in_cached": 0, + "cost_per_1m_out_cached": 0, + "context_window": 200000, + "default_max_tokens": 100000, + "can_reason": true, + "has_reasoning_efforts": false, + "supports_attachments": true + } + ] +} diff --git a/internal/providers/providers.go b/internal/providers/providers.go index 83b35230..414bc581 100644 --- a/internal/providers/providers.go +++ b/internal/providers/providers.go @@ -57,6 +57,9 @@ var huggingFaceConfig []byte //go:embed configs/aihubmix.json var aiHubMixConfig []byte +//go:embed configs/apipie.json +var apipieConfig []byte + // ProviderFunc is a function that returns a Provider. type ProviderFunc func() catwalk.Provider @@ -77,6 +80,7 @@ var providerRegistry = []ProviderFunc{ deepSeekProvider, huggingFaceProvider, aiHubMixProvider, + apipieProvider, } // GetAll returns all registered providers. @@ -160,3 +164,7 @@ func huggingFaceProvider() catwalk.Provider { func aiHubMixProvider() catwalk.Provider { return loadProviderFromConfig(aiHubMixConfig) } + +func apipieProvider() catwalk.Provider { + return loadProviderFromConfig(apipieConfig) +}