Commit 9676992

Merge branch 'PYTHON' into aacebo/remove-semantic-kernel
2 parents 0a0de45 + 1208dbb

File tree

16 files changed: +653 -4 lines

16 files changed

+653
-4
lines changed

python/packages/ai/teams/ai/data_sources/data_source.py (+4 -2)

@@ -17,8 +17,10 @@ class DataSource(ABC):
     A data source that can be used to render text that's added to a prompt.
     """
 
-    name: str
-    "Name of the data source."
+    @property
+    @abstractmethod
+    def name(self) -> str:
+        "Name of the data source."
 
     @abstractmethod
     async def render_data(
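
Because name is now an abstract property rather than a class attribute, concrete data sources override it explicitly. A minimal sketch; StaticDataSource, the import path, and the render_data parameter list are illustrative assumptions, not part of this commit:

    from teams.ai.data_sources import DataSource  # import path assumed


    class StaticDataSource(DataSource):
        """Hypothetical data source that renders a fixed string."""

        def __init__(self, name: str, value: str) -> None:
            self._name = name
            self._value = value

        @property
        def name(self) -> str:
            "Name of the data source."
            return self._name

        async def render_data(self, context, memory, tokenizer, max_tokens):
            # Parameter list assumed; only the method name is visible in this diff.
            return self._value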

python/packages/ai/teams/ai/data_sources/text_data_source.py (+1 -1)

@@ -38,7 +38,7 @@ def __init__(self, name: str, text: str) -> None:
         self._text = text
 
     @property
-    def get_name(self) -> str:
+    def name(self) -> str:
         """
         Name of the data source.
         """

python/packages/ai/teams/ai/promptsv2/__init__.py (+6 -1)

@@ -12,8 +12,13 @@
 from .group_section import GroupSection
 from .layout_engine import LayoutEngine
 from .message import ImageContentPart, ImageUrl, Message, TextContentPart
-from .prompt_section import PromptFunctions, PromptSection
+from .prompt import Prompt
+from .prompt_functions import PromptFunction, PromptFunctions
+from .prompt_manager import PromptManager
+from .prompt_manager_options import PromptManagerOptions
+from .prompt_section import PromptSection
 from .prompt_section_base import PromptSectionBase
+from .prompt_template import PromptTemplate
 from .rendered_prompt_section import RenderedPromptSection
 from .system_message import SystemMessage
 from .template_section import TemplateSection

python/packages/ai/teams/ai/promptsv2/completion_config.py (+17)

@@ -69,3 +69,20 @@ class CompletionConfig:
     stop_sequences: Optional[List[str]] = None
     temperature: float = 0
     top_p: float = 0
+
+    @classmethod
+    def from_dict(cls, data: dict) -> "CompletionConfig":
+        return cls(
+            completion_type=data.get("completion_type"),
+            frequency_penalty=data.get("frequency_penalty", 0),
+            include_history=data.get("include_history", True),
+            include_input=data.get("include_input", True),
+            include_images=data.get("include_images", False),
+            max_tokens=data.get("max_tokens", 150),
+            max_input_tokens=data.get("max_input_tokens", 2048),
+            model=data.get("model"),
+            presence_penalty=data.get("presence_penalty", 0),
+            stop_sequences=data.get("stop_sequences"),
+            temperature=data.get("temperature", 0),
+            top_p=data.get("top_p", 0),
+        )
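
from_dict maps a plain dict, such as the completion object parsed from a prompt's config.json, onto CompletionConfig, falling back to the defaults above for any missing keys. A short sketch; the module path is assumed:

    from teams.ai.promptsv2.completion_config import CompletionConfig  # module path assumed

    raw = {"model": "gpt-4", "temperature": 0.7, "max_input_tokens": 2800}
    config = CompletionConfig.from_dict(raw)

    print(config.model)            # "gpt-4"
    print(config.max_tokens)       # 150, default applied for the missing key
    print(config.include_history)  # True
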
python/packages/ai/teams/ai/promptsv2/prompt_manager.py (new file, +301)

@@ -0,0 +1,301 @@
+"""
+Copyright (c) Microsoft Corporation. All rights reserved.
+Licensed under the MIT License.
+"""
+
+import json
+import os
+from copy import deepcopy
+from pathlib import Path
+from typing import Any, Dict, List
+
+from botbuilder.core import TurnContext
+
+from ...app_error import ApplicationError
+from ...state import Memory
+from ..data_sources import DataSource
+from ..tokenizers import Tokenizer
+from .conversation_history import ConversationHistory
+from .group_section import GroupSection
+from .prompt import Prompt
+from .prompt_functions import PromptFunction, PromptFunctions
+from .prompt_manager_options import PromptManagerOptions
+from .prompt_section import PromptSection
+from .prompt_template import PromptTemplate
+from .prompt_template_config import PromptTemplateConfig
+from .template_section import TemplateSection
+from .user_input_message import UserInputMessage
+from .user_message import UserMessage
+
+
+class PromptManager(PromptFunctions):
+    _options: PromptManagerOptions
+    _data_sources: Dict[str, DataSource]
+    _functions: Dict[str, PromptFunction]
+    _prompts: Dict[str, PromptTemplate]
+
+    def __init__(self, options: PromptManagerOptions):
+        """
+        Creates a new 'PromptManager' instance.
+
+        Args:
+            options (PromptManagerOptions): Options used to configure the prompt manager.
+        """
+        self._options = options
+        self._data_sources = {}
+        self._functions = {}
+        self._prompts = {}
+
+    @property
+    def options(self) -> PromptManagerOptions:
+        """
+        Gets the configured prompt manager options.
+        """
+        return self._options
+
+    def add_data_source(self, data_source: DataSource) -> "PromptManager":
+        """
+        Registers a new data source with the prompt manager.
+
+        Args:
+            data_source (DataSource): Data source to add.
+
+        Returns:
+            PromptManager: The prompt manager for chaining.
+
+        Raises:
+            ApplicationError: If a data source with the same name already exists.
+        """
+        if data_source.name in self._data_sources:
+            raise ApplicationError(f"DataSource '{data_source.name}' already exists.")
+        self._data_sources[data_source.name] = data_source
+        return self
+
+    def get_data_source(self, name: str) -> DataSource:
+        """
+        Looks up a data source by name.
+
+        Args:
+            name (str): Name of the data source to lookup.
+
+        Returns:
+            DataSource: The data source.
+
+        Raises:
+            ApplicationError: If the data source is not found.
+        """
+        if name not in self._data_sources:
+            raise ApplicationError(f"DataSource '{name}' not found.")
+        return self._data_sources[name]
+
+    def has_data_source(self, name: str) -> bool:
+        """
+        Checks for the existence of a named data source.
+
+        Args:
+            name (str): Name of the data source to lookup.
+
+        Returns:
+            bool: True if the data source exists, False otherwise.
+        """
+        return name in self._data_sources
+
+    def add_function(self, name: str, function: PromptFunction) -> "PromptManager":
+        """
+        Registers a new prompt template function with the prompt manager.
+
+        Args:
+            name (str): Name of the function to add.
+            fn (PromptFunction): Function to add.
+
+        Returns:
+            PromptManager: The prompt manager for chaining.
+
+        Raises:
+            ApplicationError: If a function with the same name already exists.
+        """
+        if name in self._functions:
+            raise ApplicationError(f"Function '{name}' already exists.")
+        self._functions[name] = function
+        return self
+
+    def get_function(self, name: str) -> PromptFunction:
+        """
+        Looks up a prompt template function by name.
+
+        Args:
+            name (str): Name of the function to lookup.
+
+        Returns:
+            PromptFunction: The function.
+
+        Raises:
+            ApplicationError: If the function is not found.
+        """
+        if name not in self._functions:
+            raise ApplicationError(f"Function '{name}' not found.")
+        return self._functions[name]
+
+    def has_function(self, name: str) -> bool:
+        """
+        Checks for the existence of a named prompt template function.
+
+        Args:
+            name (str): Name of the function to lookup.
+
+        Returns:
+            bool: True if the function exists, False otherwise.
+        """
+        return name in self._functions
+
+    async def invoke_function(
+        self, name: str, context: TurnContext, memory: Memory, tokenizer: Tokenizer, args: List[str]
+    ) -> Any:
+        """
+        Invokes a prompt template function by name.
+
+        Args:
+            name (str): Name of the function to invoke.
+            context (TurnContext): Turn context for the current turn of conversation with the user.
+            memory (Memory): An interface for accessing state values.
+            tokenizer (Tokenizer): Tokenizer to use when rendering the prompt.
+            args (List[str]): Arguments to pass to the function.
+
+        Returns:
+            Any: Value returned by the function.
+        """
+        function = self.get_function(name)
+        return await function(context, memory, self, tokenizer, args)
+
+    def add_prompt(self, prompt: PromptTemplate) -> "PromptManager":
+        """
+        Registers a new prompt template with the prompt manager.
+
+        Args:
+            prompt (PromptTemplate): Prompt template to add.
+
+        Returns:
+            PromptManager: The prompt manager for chaining.
+
+        Raises:
+            ApplicationError: If a prompt with the same name already exists.
+        """
+        if prompt.name in self._prompts:
+            raise ApplicationError(
+                (
+                    "The PromptManager.add_prompt() method was called with a "
+                    f"previously registered prompt named '{prompt.name}'."
+                )
+            )
+
+        # Clone and cache prompt
+        self._prompts[prompt.name] = deepcopy(prompt)
+        return self
+
+    async def get_prompt(self, name) -> PromptTemplate:
+        """
+        Loads a named prompt template from the filesystem.
+
+        The template will be pre-parsed and cached for use when the template is rendered by name.
+
+        Any augmentations will also be added to the template.
+
+        Args:
+            name (str): Name of the prompt to load.
+
+        Returns:
+            PromptTemplate: The loaded and parsed prompt template.
+
+        Raises:
+            ApplicationError: If the prompt is not found or there is an error loading it.
+        """
+        if name not in self._prompts:
+            template_name = name
+
+            # Load template from disk
+            folder = os.path.join(self._options.prompts_folder, name)
+            config_file = os.path.join(folder, "config.json")
+            prompt_file = os.path.join(folder, "skprompt.txt")
+
+            # Load prompt config
+            try:
+                with open(config_file, "r", encoding="utf-8") as file:
+                    template_config = PromptTemplateConfig.from_dict(json.load(file))
+            except Exception as e:
+                raise ApplicationError(
+                    (
+                        "PromptManager.get_prompt(): an error occurred while loading "
+                        f"'{config_file}'. The file is either invalid or missing."
+                    )
+                ) from e
+
+            # Load prompt text
+            sections: List[PromptSection] = []
+            try:
+                with open(prompt_file, "r", encoding="utf-8") as file:
+                    prompt = file.read()
+                    sections.append(TemplateSection(prompt, self._options.role))
+            except Exception as e:
+                raise ApplicationError(
+                    (
+                        "PromptManager.get_prompt(): an error occurred while loading "
+                        f"'{prompt_file}'. The file is either invalid or missing."
+                    )
+                ) from e
+
+            # Migrate the templates config as needed
+            self._update_config(template_config)
+
+            # Group everything into a system message
+            sections = [GroupSection(sections, "system")]
+
+            # Include conversation history
+            # - The ConversationHistory section will use the remaining tokens from
+            #   max_input_tokens.
+            if template_config.completion.include_history:
+                sections.append(
+                    ConversationHistory(
+                        f"conversation.{template_name}_history",
+                        self._options.max_conversation_history_tokens,
+                    )
+                )
+
+            # Include user input
+            if template_config.completion.include_images:
+                sections.append(UserInputMessage(self._options.max_input_tokens))
+            elif template_config.completion.include_input:
+                sections.append(UserMessage("{{$temp.input}}", self._options.max_input_tokens))
+
+            template = PromptTemplate(template_name, Prompt(sections), template_config)
+
+            # Cache loaded template
+            self._prompts[name] = template
+
+        return self._prompts[name]
+
+    def has_prompt(self, name: str) -> bool:
+        """
+        Checks for the existence of a named prompt.
+
+        Args:
+            name (str): Name of the prompt to check.
+
+        Returns:
+            bool: True if the prompt exists, False otherwise.
+        """
+        if name not in self._prompts:
+            folder = os.path.join(self._options.prompts_folder, name)
+            prompt_file = os.path.join(folder, "skprompt.txt")
+
+            return Path(prompt_file).exists()
+        return True
+
+    def _update_config(self, template_config: PromptTemplateConfig):
+        # Migrate old schema
+        if template_config.schema == 1:
+            template_config.schema = 1.1
+            if (
+                template_config.default_backends is not None
+                and len(template_config.default_backends) > 0
+            ):
+                template_config.completion.model = template_config.default_backends[0]