Commit 0d536db (1 parent: 3b6d651)
Showing 12 changed files with 715 additions and 941 deletions.
@@ -1,2 +1,3 @@
 from .base import Runtime, AsyncRuntime
 from ._openai import OpenAIChatRuntime, OpenAIVisionRuntime, AsyncOpenAIChatRuntime
+from ._litellm import LiteLLMChatRuntime, AsyncLiteLLMChatRuntime
@@ -1,49 +1,7 @@
-import os
-
-from pydantic import Field
-
 from ._litellm import AsyncLiteLLMChatRuntime, LiteLLMChatRuntime, LiteLLMVisionRuntime
-
-
-class OpenAIChatRuntime(LiteLLMChatRuntime):
-    """
-    Runtime that uses [OpenAI API](https://openai.com/) and chat completion
-    models to perform the skill.
-    Attributes:
-        inference_settings (LiteLLMInferenceSettings): Common inference settings for LiteLLM.
-    """
-
-    # TODO does it make any sense for this to be optional?
-    api_key: str = Field(default=os.getenv("OPENAI_API_KEY"))
-
-
-class AsyncOpenAIChatRuntime(AsyncLiteLLMChatRuntime):
-    """
-    Runtime that uses [OpenAI API](https://openai.com/) and chat completion
-    models to perform the skill. It uses async calls to OpenAI API.
-    Attributes:
-        inference_settings (LiteLLMInferenceSettings): Common inference settings for LiteLLM.
-    """
-
-    api_key: str = Field(default=os.getenv("OPENAI_API_KEY"))
-
-
-class OpenAIVisionRuntime(LiteLLMVisionRuntime):
-    """
-    Runtime that uses [OpenAI API](https://openai.com/) and vision models to
-    perform the skill.
-    Only compatible with OpenAI API version 1.0.0 or higher.
-    """
-
-    api_key: str = Field(default=os.getenv("OPENAI_API_KEY"))
-    # NOTE this check used to exist in OpenAIVisionRuntime.record_to_record,
-    # but doesn't seem to have a definition
-    # def init_runtime(self) -> 'Runtime':
-    #     if not check_if_new_openai_version():
-    #         raise NotImplementedError(
-    #             f'{self.__class__.__name__} requires OpenAI API version 1.0.0 or higher.'
-    #         )
-    #     super().init_runtime()
+
+# litellm already reads the OPENAI_API_KEY env var, which was the reason for this class
+OpenAIChatRuntime = LiteLLMChatRuntime
+AsyncOpenAIChatRuntime = AsyncLiteLLMChatRuntime
+OpenAIVisionRuntime = LiteLLMVisionRuntime
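After this hunk, OpenAIChatRuntime, AsyncOpenAIChatRuntime, and OpenAIVisionRuntime are plain aliases, so existing imports keep working while the LiteLLM implementations do the actual work. The change leans on the behaviour named in the added comment: litellm resolves the OpenAI key from the environment on its own, so a wrapper class whose only job was to inject `OPENAI_API_KEY` is no longer needed. A minimal sketch of that behaviour, not part of this commit (the model name, prompt, and placeholder key are illustrative):

```python
# Sketch only: litellm picks up OPENAI_API_KEY from the environment by itself,
# which is why the key-injecting wrapper classes above could be dropped.
import os
import litellm

# Placeholder key; normally this is set in the shell, not in code.
os.environ.setdefault("OPENAI_API_KEY", "sk-...")

# litellm.completion() accepts OpenAI-style model names and chat messages
# and returns an OpenAI-style response object.
response = litellm.completion(
    model="gpt-3.5-turbo",
    messages=[{"role": "user", "content": "Say hello"}],
)
print(response.choices[0].message.content)
```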