Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion DESCRIPTION
Original file line number Diff line number Diff line change
@@ -1,6 +1,6 @@
Package: llm.api
Title: Minimal LLM Chat Interface
Version: 0.1.2
Version: 0.1.2.1
Authors@R: c(
person("Troy", "Hernandez", role = c("aut", "cre"),
email = "troy@cornball.ai",
Expand Down
1 change: 1 addition & 0 deletions NAMESPACE
Original file line number Diff line number Diff line change
Expand Up @@ -20,5 +20,6 @@ export(mcp_start)
export(mcp_tools)
export(mcp_tools_for_api)
export(mcp_tools_for_claude)
export(provider_default_model)

S3method(print,mcp_connection)
7 changes: 7 additions & 0 deletions NEWS.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,10 @@
# llm.api 0.1.2.1

* New exported helper `provider_default_model(provider)`. Returns the
model id `chat()` falls back to when no model is specified, so client
code can display the resolved model upfront without duplicating the
lookup table or reaching into internals.

# llm.api 0.1.2

* `chat()` now returns `$thinking` and `$finish_reason`. Reasoning models
Expand Down
18 changes: 18 additions & 0 deletions R/providers.R
Original file line number Diff line number Diff line change
Expand Up @@ -69,6 +69,24 @@
)
}

#' Default model for a provider
#'
#' Looks up the model id that `chat()` uses when the caller omits the
#' model argument. Handy for client code that wants to show the
#' resolved model ahead of time (e.g., in a status line) without
#' copying the lookup table or reaching into package internals.
#'
#' @param provider Character. One of `"openai"`, `"anthropic"`,
#'   `"moonshot"`, `"ollama"`.
#' @return Character. The default model id for that provider.
#' @export
#' @examples
#' provider_default_model("anthropic")
#' provider_default_model("moonshot")
provider_default_model <- function(provider) {
  # Delegate validation to the shared config lookup: an unknown
  # provider errors there ("Unknown provider"), so no extra check here.
  config <- .get_provider_config(provider)
  config[["default_model"]]
}

#' Chat with OpenAI
#'
#' Convenience wrapper for 'OpenAI' models.
Expand Down
9 changes: 9 additions & 0 deletions inst/tinytest/test_providers.R
Original file line number Diff line number Diff line change
Expand Up @@ -76,3 +76,12 @@ expect_null(cfg$api_key)

# Unknown provider errors
expect_error(llm.api:::.get_provider_config("unknown"), pattern = "Unknown provider")

# provider_default_model() exposes the default model id for each
# supported provider so client code (e.g., status lines) can resolve it
# upfront without reaching into internals.
# NOTE(review): these expected ids mirror the lookup table behind
# .get_provider_config() in R/providers.R — update both together.
expect_equal(provider_default_model("openai"), "gpt-4o-mini")
expect_equal(provider_default_model("anthropic"), "claude-sonnet-4-6")
expect_equal(provider_default_model("moonshot"), "kimi-k2")
expect_equal(provider_default_model("ollama"), "llama3.2")
# An unrecognized provider surfaces the same error the internal config
# lookup raises.
expect_error(provider_default_model("nonsense"), pattern = "Unknown provider")
24 changes: 24 additions & 0 deletions man/provider_default_model.Rd
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
% tinyrox says don't edit this manually, but it can't stop you!
\name{provider_default_model}
\alias{provider_default_model}
\title{Default model for a provider}
\usage{
provider_default_model(provider)
}
\arguments{
\item{provider}{Character. One of \code{"openai"}, \code{"anthropic"},
\code{"moonshot"}, \code{"ollama"}.}
}
\value{
Character. The default model id for that provider.
}
\description{
Returns the model name \code{chat()} falls back to when the caller
doesn't specify one. Useful for client code that wants to display
the resolved model upfront (e.g., in a status line) without
duplicating the lookup table.
}
\examples{
provider_default_model("anthropic")
provider_default_model("moonshot")
}