Skip to content

Commit b84c210

Browse files
gustavocidornelas authored and whoseoyster committed
Add openlayer_inference_pipeline_id parameter to the OpenAIMonitor
1 parent 198eabc commit b84c210

File tree

2 files changed

+51
-22
lines changed

2 files changed

+51
-22
lines changed

CHANGELOG.md

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -8,6 +8,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
88
## Unreleased
99

1010
### Added
11+
* Added `openlayer_inference_pipeline_id` as an optional parameter to the `OpenAIMonitor`. This is an alternative to `openlayer_inference_pipeline_name` and `openlayer_inference_project_name` parameters for identifying the inference pipeline on the platform.
1112
* Added `monitor_output_only` as an argument to the OpenAI `llm_monitor`. If set to `True`, the monitor will only record the output of the model, and not the input.
1213
* Added `costColumnName` as an optional field in the config for LLM data.
1314

openlayer/llm_monitors.py

Lines changed: 50 additions & 22 deletions
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@
99

1010
import openlayer
1111

12-
from . import tasks, utils
12+
from . import inference_pipelines, tasks, utils
1313

1414
logger = logging.getLogger(__name__)
1515

@@ -43,6 +43,11 @@ class OpenAIMonitor:
4343
environment variable ``OPENLAYER_INFERENCE_PIPELINE_NAME``. This is required if
4444
`publish` is set to True and you gave your inference pipeline a name different
4545
than the default.
46+
openlayer_inference_pipeline_id : str, optional
47+
The Openlayer inference pipeline id. If not provided, it is read from the
48+
environment variable ``OPENLAYER_INFERENCE_PIPELINE_ID``.
49+
This is only needed if you do not want to specify an inference pipeline name and
50+
project name, and you want to load the inference pipeline directly from its id.
4651
4752
Examples
4853
--------
@@ -158,17 +163,19 @@ def __init__(
158163
openlayer_api_key: Optional[str] = None,
159164
openlayer_project_name: Optional[str] = None,
160165
openlayer_inference_pipeline_name: Optional[str] = None,
166+
openlayer_inference_pipeline_id: Optional[str] = None,
161167
) -> None:
162168
# Openlayer setup
163169
self.openlayer_api_key: str = None
164170
self.openlayer_project_name: str = None
165171
self.openlayer_inference_pipeline_name: str = None
166-
self.inference_pipeline: openlayer.InferencePipeline = None
172+
self.inference_pipeline: inference_pipelines.InferencePipeline = None
167173
self._initialize_openlayer(
168174
publish=publish,
169175
api_key=openlayer_api_key,
170176
project_name=openlayer_project_name,
171177
inference_pipeline_name=openlayer_inference_pipeline_name,
178+
inference_pipeline_id=openlayer_inference_pipeline_id,
172179
)
173180
self._load_inference_pipeline()
174181

@@ -198,6 +205,7 @@ def _initialize_openlayer(
198205
api_key: Optional[str] = None,
199206
project_name: Optional[str] = None,
200207
inference_pipeline_name: Optional[str] = None,
208+
inference_pipeline_id: Optional[str] = None,
201209
) -> None:
202210
"""Initializes the Openlayer attributes, if credentials are provided."""
203211
# Get credentials from environment variables if not provided
@@ -209,39 +217,59 @@ def _initialize_openlayer(
209217
inference_pipeline_name = utils.get_env_variable(
210218
"OPENLAYER_INFERENCE_PIPELINE_NAME"
211219
)
212-
if publish and (api_key is None or project_name is None):
213-
raise ValueError(
214-
"To publish data to Openlayer, you must provide an API key and "
215-
"a project name. This can be done by setting the environment "
216-
"variables `OPENLAYER_API_KEY` and `OPENLAYER_PROJECT_NAME`, or by "
217-
"passing them as arguments to the OpenAIMonitor constructor "
218-
"(`openlayer_api_key` and `openlayer_project_name`, respectively)."
220+
if inference_pipeline_id is None:
221+
inference_pipeline_id = utils.get_env_variable(
222+
"OPENLAYER_INFERENCE_PIPELINE_ID"
219223
)
224+
if publish and (api_key is None or project_name is None):
225+
if inference_pipeline_id is None:
226+
raise ValueError(
227+
"To publish data to Openlayer, you must provide an API key and "
228+
"a project name. This can be done by setting the environment "
229+
"variables `OPENLAYER_API_KEY` and `OPENLAYER_PROJECT_NAME`, or by "
230+
"passing them as arguments to the OpenAIMonitor constructor "
231+
"(`openlayer_api_key` and `openlayer_project_name`, respectively)."
232+
)
220233

221234
self.openlayer_api_key = api_key
222235
self.openlayer_project_name = project_name
223236
self.openlayer_inference_pipeline_name = inference_pipeline_name
237+
self.openlayer_inference_pipeline_id = inference_pipeline_id
224238

225239
def _load_inference_pipeline(self) -> None:
226240
"""Load inference pipeline from the Openlayer platform.
227241
228242
If no platform/project information is provided, it is set to None.
229243
"""
230244
inference_pipeline = None
231-
if self.openlayer_api_key and self.openlayer_project_name:
232-
with utils.HidePrints():
233-
client = openlayer.OpenlayerClient(
234-
api_key=self.openlayer_api_key, verbose=False
235-
)
236-
project = client.create_project(
237-
name=self.openlayer_project_name, task_type=tasks.TaskType.LLM
245+
if self.openlayer_api_key:
246+
client = openlayer.OpenlayerClient(
247+
api_key=self.openlayer_api_key, verbose=False
248+
)
249+
if self.openlayer_inference_pipeline_id:
250+
# Load inference pipeline directly from the id
251+
inference_pipeline = inference_pipelines.InferencePipeline(
252+
client=client,
253+
upload=None,
254+
json={
255+
"id": self.openlayer_inference_pipeline_id,
256+
"projectId": None,
257+
},
258+
task_type=tasks.TaskType.LLM,
238259
)
239-
if self.openlayer_inference_pipeline_name:
240-
inference_pipeline = project.load_inference_pipeline(
241-
name=self.openlayer_inference_pipeline_name
242-
)
243-
else:
244-
inference_pipeline = project.create_inference_pipeline()
260+
else:
261+
if self.openlayer_project_name:
262+
with utils.HidePrints():
263+
project = client.create_project(
264+
name=self.openlayer_project_name,
265+
task_type=tasks.TaskType.LLM,
266+
)
267+
if self.openlayer_inference_pipeline_name:
268+
inference_pipeline = project.load_inference_pipeline(
269+
name=self.openlayer_inference_pipeline_name
270+
)
271+
else:
272+
inference_pipeline = project.create_inference_pipeline()
245273

246274
self.inference_pipeline = inference_pipeline
247275

0 commit comments

Comments (0)