diff --git a/forecasting_tools/ai_models/model_archetypes/openai_text_model.py b/forecasting_tools/ai_models/model_archetypes/openai_text_model.py
index 7a08469..81c6488 100644
--- a/forecasting_tools/ai_models/model_archetypes/openai_text_model.py
+++ b/forecasting_tools/ai_models/model_archetypes/openai_text_model.py
@@ -22,7 +22,11 @@ class OpenAiTextToTextModel(TraditionalOnlineLlm, ABC):


     _OPENAI_ASYNC_CLIENT = AsyncOpenAI(
-        api_key=os.getenv("OPENAI_API_KEY"),
+        api_key=(
+            os.getenv("OPENAI_API_KEY")
+            if os.getenv("OPENAI_API_KEY") is not None
+            else "fake_key_so_it_doesn't_error_on_initialization"
+        ),
         max_retries=0,  # Retry is implemented locally
     )

diff --git a/forecasting_tools/ai_models/model_archetypes/perplexity_text_model.py b/forecasting_tools/ai_models/model_archetypes/perplexity_text_model.py
index 677ba38..baa98f1 100644
--- a/forecasting_tools/ai_models/model_archetypes/perplexity_text_model.py
+++ b/forecasting_tools/ai_models/model_archetypes/perplexity_text_model.py
@@ -26,7 +26,11 @@ class PerplexityTextModel(OpenAiTextToTextModel, PricedPerRequest, ABC):
     PRICE_PER_TOKEN: float
     PERPLEXITY_API_KEY = os.getenv("PERPLEXITY_API_KEY")
     _OPENAI_ASYNC_CLIENT = AsyncOpenAI(
-        api_key=PERPLEXITY_API_KEY,
+        api_key=(
+            PERPLEXITY_API_KEY
+            if PERPLEXITY_API_KEY is not None
+            else "fake_key_so_it_doesn't_error_on_initialization"
+        ),
         base_url="https://api.perplexity.ai",
         max_retries=0,  # Retry is implemented locally
     )
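
Review note: a minimal sketch of the pattern both hunks apply, assuming the stock `openai` SDK. The `_api_key_or_placeholder` helper is hypothetical (not part of the diff) and only illustrates the fallback: the client is constructed at class definition time, and the SDK raises if no API key is available, so a placeholder string keeps import from failing when the environment variable is unset.

```python
import os

from openai import AsyncOpenAI


# Hypothetical helper (not in the diff) showing the placeholder fallback:
# construction would otherwise raise when the env var is unset; real
# requests still fail until a valid key is supplied.
def _api_key_or_placeholder(env_var: str) -> str:
    key = os.getenv(env_var)
    return key if key is not None else "fake_key_so_it_doesn't_error_on_initialization"


client = AsyncOpenAI(
    api_key=_api_key_or_placeholder("OPENAI_API_KEY"),
    max_retries=0,  # Retry is implemented locally
)
```

As a design note, `os.getenv` also accepts a default, so `os.getenv("OPENAI_API_KEY", "fake_key_so_it_doesn't_error_on_initialization")` would be an equivalent one-liner; the diff keeps the explicit conditional instead.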