Commit c6816d8

Review comments
1 parent d468c81 commit c6816d8

File tree

docs/reference/tools-reference/open_source_llm_tool.md
src/promptflow-tools/promptflow/tools/open_source_llm.py
src/promptflow-tools/tests/conftest.py
src/promptflow-tools/tests/test_open_source_llm.py

4 files changed: +35 -22 lines changed

docs/reference/tools-reference/open_source_llm_tool.md (+10 -5)

@@ -24,13 +24,18 @@ Setup connections to provisioned resources in prompt flow.
 |-------------|----------|----------|----------|-------------|
 | CustomConnection | Required | Required | - | - |
 
-Instructions to create a Custom Connection [can be found here.](https://learn.microsoft.com/en-us/azure/machine-learning/prompt-flow/how-to-integrate-with-langchain?view=azureml-api-2#create-a-connection)
+Instructions to create a Custom Connection [can be found here.](https://microsoft.github.io/promptflow/how-to-guides/manage-connections.html#create-a-connection)
 
 The keys to set are:
 
-1. endpoint_url
-2. endpoint_api_key
-3. model_family
+1. **endpoint_url**
+    - This value can be found at the previously created Inferencing endpoint.
+2. **endpoint_api_key**
+    - Ensure to set this as a secret value.
+    - This value can be found at the previously created Inferencing endpoint.
+3. **model_family**
+    - Supported values: LLAMA, DOLLY, GPT2, or FALCON
+    - This value is dependent on the type of deployment you are targetting.
 
 *These values can be found at the previously created Inferencing endpoint.*
 
@@ -56,4 +61,4 @@ The keys to set are:
 1. Choose a Model from the catalog and deploy.
 2. Setup and select the connections to model deployment.
 3. Configure the model api and its parameters
-4. Prepare the Prompt with [guidance](./prompt-tool.md#how-to-write-prompt).
+4. Prepare the Prompt with [guidance](prompt-tool.md#how-to-write-prompt).
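The three keys listed in the doc map onto a CustomConnection's configs and secrets. A minimal sketch of building one in memory, mirroring how the tool's tests construct their connection fixtures; the endpoint values below are placeholders, not part of this commit:

```python
# Minimal sketch: a CustomConnection carrying the keys the doc lists.
# The endpoint URL and API key are placeholders, not part of this commit.
from promptflow.connections import CustomConnection

connection = CustomConnection(
    configs={
        "endpoint_url": "https://<your-endpoint>.inference.ml.azure.com/score",  # from the Inferencing endpoint
        "model_family": "LLAMA",  # one of LLAMA, DOLLY, GPT2, FALCON
    },
    secrets={
        "endpoint_api_key": "<endpoint-api-key>",  # keep this a secret value
    },
)
```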

src/promptflow-tools/promptflow/tools/open_source_llm.py (+15 -15)

@@ -59,20 +59,20 @@ def format_generic_response_payload(output: bytes, response_key: str) -> str:
     except KeyError as e:
         if response_key is None:
             message = f"""Expected the response to fit the following schema:
-            `[
-                <text>
-            ]`
-            Instead, received {response_json} and access failed at key `{e}`.
-            """
+`[
+    <text>
+]`
+Instead, received {response_json} and access failed at key `{e}`.
+"""
         else:
             message = f"""Expected the response to fit the following schema:
-            `[
-                {{
-                    "{response_key}": <text>
-                }}
-            ]`
-            Instead, received {response_json} and access failed at key `{e}`.
-            """
+`[
+    {{
+        "{response_key}": <text>
+    }}
+]`
+Instead, received {response_json} and access failed at key `{e}`.
+"""
     raise OpenSourceLLMUserError(message=message)
 
 
@@ -364,22 +364,22 @@ def __init__(self, connection: CustomConnection):
                 accepted_keys = ",".join([key for key in self.REQUIRED_CONFIG_KEYS])
                 raise OpenSourceLLMKeyValidationError(
                     message=f"""Required key `{key}` not found in given custom connection.
-                    Required keys are: {accepted_keys}."""
+Required keys are: {accepted_keys}."""
                 )
         for key in self.REQUIRED_SECRET_KEYS:
             if key not in conn_dict:
                 accepted_keys = ",".join([key for key in self.REQUIRED_SECRET_KEYS])
                 raise OpenSourceLLMKeyValidationError(
                     message=f"""Required secret key `{key}` not found in given custom connection.
-                    Required keys are: {accepted_keys}."""
+Required keys are: {accepted_keys}."""
                 )
         try:
             self.model_family = ModelFamily[connection.configs['model_family']]
         except KeyError:
             accepted_models = ",".join([model.name for model in ModelFamily])
             raise OpenSourceLLMKeyValidationError(
                 message=f"""Given model_family '{connection.configs['model_family']}' not recognized.
-                Supported models are: {accepted_models}."""
+Supported models are: {accepted_models}."""
             )
         self.connection = connection
 
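The removed and added lines above carry identical visible text, so the change reads as an indentation-only cleanup of the multi-line f-strings. A small illustrative sketch (not from this commit) of why flush-left continuation lines matter when tests compare the rendered message exactly:

```python
# Illustrative only: triple-quoted f-strings keep source indentation, so an
# indented continuation line leaks leading spaces into the error message.
accepted_keys = "endpoint_url,model_family"

indented = f"""Required key `endpoint_url` not found in given custom connection.
    Required keys are: {accepted_keys}."""
flush_left = f"""Required key `endpoint_url` not found in given custom connection.
Required keys are: {accepted_keys}."""

assert indented.splitlines()[1].startswith("    ")        # stray spaces leak in
assert flush_left.splitlines()[1].startswith("Required")  # clean second line
```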

src/promptflow-tools/tests/conftest.py (+2 -2)

@@ -8,7 +8,7 @@
 # Avoid circular dependencies: Use import 'from promptflow._internal' instead of 'from promptflow'
 # since the code here is in promptflow namespace as well
 from promptflow._internal import ConnectionManager
-from promptflow.connections import CustomConnection, OpenAIConnection
+from promptflow.connections import CustomConnection, OpenAIConnection, SerpConnection
 from promptflow.tools.aoai import AzureOpenAI
 
 PROMOTFLOW_ROOT = Path(__file__).absolute().parents[1]
@@ -57,7 +57,7 @@ def skip_if_no_key(request, mocker):
     conn_name = request.node.get_closest_marker('skip_if_no_key').args[0]
     connection = request.getfixturevalue(conn_name)
     # if dummy placeholder key, skip.
-    if isinstance(connection, OpenAIConnection):
+    if isinstance(connection, OpenAIConnection) or isinstance(connection, SerpConnection):
         if "-api-key" in connection.api_key:
             pytest.skip('skipped because no key')
     elif isinstance(connection, CustomConnection):
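With SerpConnection added to the placeholder-key check, Serp-backed tests are skipped rather than failed when only the dummy key is configured. A hedged usage sketch; the fixture name serp_connection is an assumption for illustration:

```python
# Hypothetical usage of the skip_if_no_key marker for a Serp-backed test.
# The fixture name "serp_connection" is assumed here; with this change, a
# placeholder "-api-key" value makes the test skip instead of failing.
import pytest

@pytest.mark.skip_if_no_key("serp_connection")
def test_serp_search_smoke(serp_connection):
    assert serp_connection.api_key  # the real tool call would follow here
```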

src/promptflow-tools/tests/test_open_source_llm.py (+8)

@@ -59,6 +59,7 @@ def test_open_source_llm_con_url_chat(self, gpt2_custom_connection):
             os.call(self.chat_prompt, API.CHAT)
         assert exc_info.value.message == """Required key `endpoint_url` not found in given custom connection.
Required keys are: endpoint_url,model_family."""
+        assert exc_info.value.error_codes == "UserError/ToolValidationError/OpenSourceLLMKeyValidationError".split("/")
 
     @pytest.mark.skip_if_no_key("gpt2_custom_connection")
     def test_open_source_llm_con_key_chat(self, gpt2_custom_connection):
@@ -70,6 +71,7 @@ def test_open_source_llm_con_key_chat(self, gpt2_custom_connection):
             "Required secret key `endpoint_api_key` "
             + """not found in given custom connection.
Required keys are: endpoint_api_key.""")
+        assert exc_info.value.error_codes == "UserError/ToolValidationError/OpenSourceLLMKeyValidationError".split("/")
 
     @pytest.mark.skip_if_no_key("gpt2_custom_connection")
     def test_open_source_llm_con_model_chat(self, gpt2_custom_connection):
@@ -79,6 +81,7 @@ def test_open_source_llm_con_model_chat(self, gpt2_custom_connection):
             os.call(self.completion_prompt, API.COMPLETION)
         assert exc_info.value.message == """Required key `model_family` not found in given custom connection.
Required keys are: endpoint_url,model_family."""
+        assert exc_info.value.error_codes == "UserError/ToolValidationError/OpenSourceLLMKeyValidationError".split("/")
 
     def test_open_source_llm_escape_chat(self):
         danger = r"The quick \brown fox\tjumped\\over \the \\boy\r\n"
@@ -117,6 +120,7 @@ def test_open_source_llm_llama_parse_system_not_accepted(self):
             + "API, please select the appropriate API type and deployment name. If you do intend to use the Chat "
             + "API, please refer to the guideline at https://aka.ms/pfdoc/chat-prompt or view the samples in our "
             + "gallery that contain 'Chat' in the name.")
+        assert exc_info.value.error_codes == "UserError/OpenSourceLLMUserError".split("/")
 
     def test_open_source_llm_llama_parse_ignore_whitespace(self):
         bad_chat_prompt = f"""system:
@@ -135,6 +139,7 @@ def test_open_source_llm_llama_parse_ignore_whitespace(self):
             + "appropriate API type and deployment name. If you do intend to use the Chat API, please refer to the "
             + "guideline at https://aka.ms/pfdoc/chat-prompt or view the samples in our gallery that contain 'Chat' "
             + "in the name.")
+        assert exc_info.value.error_codes == "UserError/OpenSourceLLMUserError".split("/")
 
     def test_open_source_llm_llama_parse_chat_with_comp(self):
         with pytest.raises(OpenSourceLLMUserError) as exc_info:
@@ -146,6 +151,7 @@ def test_open_source_llm_llama_parse_chat_with_comp(self):
             + "intend to use the Completion API, please select the appropriate API type and deployment name. If you do "
             + "intend to use the Chat API, please refer to the guideline at https://aka.ms/pfdoc/chat-prompt or view "
             + "the samples in our gallery that contain 'Chat' in the name.")
+        assert exc_info.value.error_codes == "UserError/OpenSourceLLMUserError".split("/")
 
     @pytest.mark.skip_if_no_key("gpt2_custom_connection")
     def test_open_source_llm_llama_endpoint_miss(self, gpt2_custom_connection):
@@ -158,6 +164,7 @@ def test_open_source_llm_llama_endpoint_miss(self, gpt2_custom_connection):
         assert exc_info.value.message == (
             "Exception hit calling Oneline Endpoint: "
             + "HTTPError: HTTP Error 424: Failed Dependency")
+        assert exc_info.value.error_codes == "UserError/OpenSourceLLMOnlineEndpointError".split("/")
 
     @pytest.mark.skip_if_no_key("gpt2_custom_connection")
     def test_open_source_llm_llama_deployment_miss(self, gpt2_custom_connection):
@@ -170,3 +177,4 @@ def test_open_source_llm_llama_deployment_miss(self, gpt2_custom_connection):
         assert exc_info.value.message == (
             "Exception hit calling Oneline Endpoint: "
             + "HTTPError: HTTP Error 404: Not Found")
+        assert exc_info.value.error_codes == "UserError/OpenSourceLLMOnlineEndpointError".split("/")
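The added assertions inspect error_codes on the raised exception, which, as the expected values here suggest, lists the exception hierarchy from the general category down to the concrete class. The split("/") idiom just expands a compact string into that list:

```python
# What the new assertions compare against: split("/") expands the compact
# string into the expected hierarchy, most general category first.
expected = "UserError/ToolValidationError/OpenSourceLLMKeyValidationError".split("/")
assert expected == ["UserError", "ToolValidationError", "OpenSourceLLMKeyValidationError"]
```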
