Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[PY] feat: add prompt manager #1233

Merged
merged 8 commits into from
Feb 5, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
6 changes: 4 additions & 2 deletions python/packages/ai/teams/ai/data_sources/data_source.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,8 +17,10 @@ class DataSource(ABC):
A data source that can be used to render text that's added to a prompt.
"""

name: str
"Name of the data source."
@property
@abstractmethod
lilyydu marked this conversation as resolved.
Show resolved Hide resolved
def name(self) -> str:
"Name of the data source."

@abstractmethod
async def render_data(
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def __init__(self, name: str, text: str) -> None:
self._text = text

@property
def get_name(self) -> str:
def name(self) -> str:
"""
Name of the data source.
"""
Expand Down
7 changes: 6 additions & 1 deletion python/packages/ai/teams/ai/promptsv2/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -12,8 +12,13 @@
from .group_section import GroupSection
from .layout_engine import LayoutEngine
from .message import ImageContentPart, ImageUrl, Message, TextContentPart
from .prompt_section import PromptFunctions, PromptSection
from .prompt import Prompt
from .prompt_functions import PromptFunction, PromptFunctions
from .prompt_manager import PromptManager
from .prompt_manager_options import PromptManagerOptions
from .prompt_section import PromptSection
from .prompt_section_base import PromptSectionBase
from .prompt_template import PromptTemplate
from .rendered_prompt_section import RenderedPromptSection
from .system_message import SystemMessage
from .template_section import TemplateSection
Expand Down
17 changes: 17 additions & 0 deletions python/packages/ai/teams/ai/promptsv2/completion_config.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,3 +69,20 @@ class CompletionConfig:
stop_sequences: Optional[List[str]] = None
temperature: float = 0
top_p: float = 0

@classmethod
def from_dict(cls, data: dict) -> "CompletionConfig":
    """
    Builds a `CompletionConfig` from a plain dict, e.g. one parsed from a
    prompt's `config.json` file.

    Missing keys fall back to the defaults listed below. Keys looked up with
    no explicit fallback (`completion_type`, `model`, `stop_sequences`)
    default to `None`.

    Args:
        data (dict): Raw configuration mapping. Unknown keys are ignored.

    Returns:
        CompletionConfig: The populated configuration instance.
    """
    # NOTE(review): max_tokens defaults to 150 / max_input_tokens to 2048
    # here, while the dataclass fields above show temperature/top_p
    # defaulting to 0 — confirm these fallbacks match the dataclass's own
    # field defaults so dict-loaded and directly-constructed configs agree.
    return cls(
        completion_type=data.get("completion_type"),
        frequency_penalty=data.get("frequency_penalty", 0),
        include_history=data.get("include_history", True),
        include_input=data.get("include_input", True),
        include_images=data.get("include_images", False),
        max_tokens=data.get("max_tokens", 150),
        max_input_tokens=data.get("max_input_tokens", 2048),
        model=data.get("model"),
        presence_penalty=data.get("presence_penalty", 0),
        stop_sequences=data.get("stop_sequences"),
        temperature=data.get("temperature", 0),
        top_p=data.get("top_p", 0),
    )
301 changes: 301 additions & 0 deletions python/packages/ai/teams/ai/promptsv2/prompt_manager.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,301 @@
"""
Copyright (c) Microsoft Corporation. All rights reserved.
Licensed under the MIT License.
"""

import json
import os
from copy import deepcopy
from pathlib import Path
from typing import Any, Dict, List

from botbuilder.core import TurnContext

from ...app_error import ApplicationError
from ...state import Memory
from ..data_sources import DataSource
from ..tokenizers import Tokenizer
from .conversation_history import ConversationHistory
from .group_section import GroupSection
from .prompt import Prompt
from .prompt_functions import PromptFunction, PromptFunctions
from .prompt_manager_options import PromptManagerOptions
from .prompt_section import PromptSection
from .prompt_template import PromptTemplate
from .prompt_template_config import PromptTemplateConfig
from .template_section import TemplateSection
from .user_input_message import UserInputMessage
from .user_message import UserMessage


class PromptManager(PromptFunctions):
    """
    Filesystem-backed prompt manager.

    Maintains registries of data sources, prompt template functions, and
    prompt templates. Templates that were not registered programmatically
    via `add_prompt()` are loaded on demand from
    `<prompts_folder>/<name>/config.json` and
    `<prompts_folder>/<name>/skprompt.txt`, assembled into sections
    (system message, optional conversation history, optional user input),
    and cached for subsequent lookups.
    """

    # Configuration: prompts folder, token budgets, and message role.
    _options: PromptManagerOptions
    # Registered data sources, keyed by data-source name.
    _data_sources: Dict[str, DataSource]
    # Registered prompt template functions, keyed by function name.
    _functions: Dict[str, PromptFunction]
    # Cached prompt templates, keyed by prompt name.
    _prompts: Dict[str, PromptTemplate]

    def __init__(self, options: PromptManagerOptions):
        """
        Creates a new 'PromptManager' instance.

        Args:
            options (PromptManagerOptions): Options used to configure the prompt manager.
        """
        self._options = options
        self._data_sources = {}
        self._functions = {}
        self._prompts = {}

    @property
    def options(self) -> PromptManagerOptions:
        """
        Gets the configured prompt manager options.
        """
        return self._options

    def add_data_source(self, data_source: DataSource) -> "PromptManager":
        """
        Registers a new data source with the prompt manager.

        Args:
            data_source (DataSource): Data source to add.

        Returns:
            PromptManager: The prompt manager for chaining.

        Raises:
            ApplicationError: If a data source with the same name already exists.
        """
        if data_source.name in self._data_sources:
            raise ApplicationError(f"DataSource '{data_source.name}' already exists.")
        self._data_sources[data_source.name] = data_source
        return self

    def get_data_source(self, name: str) -> DataSource:
        """
        Looks up a data source by name.

        Args:
            name (str): Name of the data source to lookup.

        Returns:
            DataSource: The data source.

        Raises:
            ApplicationError: If the data source is not found.
        """
        if name not in self._data_sources:
            raise ApplicationError(f"DataSource '{name}' not found.")
        return self._data_sources[name]

    def has_data_source(self, name: str) -> bool:
        """
        Checks for the existence of a named data source.

        Args:
            name (str): Name of the data source to lookup.

        Returns:
            bool: True if the data source exists, False otherwise.
        """
        return name in self._data_sources

    def add_function(self, name: str, function: PromptFunction) -> "PromptManager":
        """
        Registers a new prompt template function with the prompt manager.

        Args:
            name (str): Name of the function to add.
            function (PromptFunction): Function to add.

        Returns:
            PromptManager: The prompt manager for chaining.

        Raises:
            ApplicationError: If a function with the same name already exists.
        """
        if name in self._functions:
            raise ApplicationError(f"Function '{name}' already exists.")
        self._functions[name] = function
        return self

    def get_function(self, name: str) -> PromptFunction:
        """
        Looks up a prompt template function by name.

        Args:
            name (str): Name of the function to lookup.

        Returns:
            PromptFunction: The function.

        Raises:
            ApplicationError: If the function is not found.
        """
        if name not in self._functions:
            raise ApplicationError(f"Function '{name}' not found.")
        return self._functions[name]

    def has_function(self, name: str) -> bool:
        """
        Checks for the existence of a named prompt template function.

        Args:
            name (str): Name of the function to lookup.

        Returns:
            bool: True if the function exists, False otherwise.
        """
        return name in self._functions

    async def invoke_function(
        self, name: str, context: TurnContext, memory: Memory, tokenizer: Tokenizer, args: List[str]
    ) -> Any:
        """
        Invokes a prompt template function by name.

        Args:
            name (str): Name of the function to invoke.
            context (TurnContext): Turn context for the current turn of conversation with the user.
            memory (Memory): An interface for accessing state values.
            tokenizer (Tokenizer): Tokenizer to use when rendering the prompt.
            args (List[str]): Arguments to pass to the function.

        Returns:
            Any: Value returned by the function.

        Raises:
            ApplicationError: If the function is not registered.
        """
        function = self.get_function(name)
        return await function(context, memory, self, tokenizer, args)

    def add_prompt(self, prompt: PromptTemplate) -> "PromptManager":
        """
        Registers a new prompt template with the prompt manager.

        Args:
            prompt (PromptTemplate): Prompt template to add.

        Returns:
            PromptManager: The prompt manager for chaining.

        Raises:
            ApplicationError: If a prompt with the same name already exists.
        """
        if prompt.name in self._prompts:
            raise ApplicationError(
                (
                    "The PromptManager.add_prompt() method was called with a "
                    f"previously registered prompt named '{prompt.name}'."
                )
            )

        # Clone and cache prompt so later mutations by the caller don't
        # affect the registered copy.
        self._prompts[prompt.name] = deepcopy(prompt)
        return self

    async def get_prompt(self, name: str) -> PromptTemplate:
        """
        Loads a named prompt template from the filesystem.

        The template will be pre-parsed and cached for use when the template is rendered by name.

        Any augmentations will also be added to the template.

        Args:
            name (str): Name of the prompt to load.

        Returns:
            PromptTemplate: The loaded and parsed prompt template.

        Raises:
            ApplicationError: If the prompt is not found or there is an error loading it.
        """
        if name not in self._prompts:
            template_name = name

            # Load template from disk
            folder = os.path.join(self._options.prompts_folder, name)
            config_file = os.path.join(folder, "config.json")
            prompt_file = os.path.join(folder, "skprompt.txt")

            # Load prompt config; wrap any I/O or parse failure in a
            # descriptive ApplicationError, preserving the original cause.
            try:
                with open(config_file, "r", encoding="utf-8") as file:
                    template_config = PromptTemplateConfig.from_dict(json.load(file))
            except Exception as e:
                raise ApplicationError(
                    (
                        "PromptManager.get_prompt(): an error occurred while loading "
                        f"'{config_file}'. The file is either invalid or missing."
                    )
                ) from e

            # Load prompt text
            sections: List[PromptSection] = []
            try:
                with open(prompt_file, "r", encoding="utf-8") as file:
                    prompt = file.read()
                    sections.append(TemplateSection(prompt, self._options.role))
            except Exception as e:
                raise ApplicationError(
                    (
                        "PromptManager.get_prompt(): an error occurred while loading "
                        f"'{prompt_file}'. The file is either invalid or missing."
                    )
                ) from e

            # Migrate the templates config as needed
            self._update_config(template_config)

            # Group everything into a system message
            sections = [GroupSection(sections, "system")]

            # Include conversation history
            # - The ConversationHistory section will use the remaining tokens from
            #   max_input_tokens.
            if template_config.completion.include_history:
                sections.append(
                    ConversationHistory(
                        f"conversation.{template_name}_history",
                        self._options.max_conversation_history_tokens,
                    )
                )

            # Include user input: images take precedence over plain text input.
            if template_config.completion.include_images:
                sections.append(UserInputMessage(self._options.max_input_tokens))
            elif template_config.completion.include_input:
                sections.append(UserMessage("{{$temp.input}}", self._options.max_input_tokens))

            template = PromptTemplate(template_name, Prompt(sections), template_config)

            # Cache loaded template
            self._prompts[name] = template

        return self._prompts[name]

    def has_prompt(self, name: str) -> bool:
        """
        Checks for the existence of a named prompt.

        A prompt "exists" if it has been registered/cached, or if its
        `skprompt.txt` file is present under the configured prompts folder.

        Args:
            name (str): Name of the prompt to check.

        Returns:
            bool: True if the prompt exists, False otherwise.
        """
        if name not in self._prompts:
            folder = os.path.join(self._options.prompts_folder, name)
            prompt_file = os.path.join(folder, "skprompt.txt")

            return Path(prompt_file).exists()
        return True

    def _update_config(self, template_config: PromptTemplateConfig) -> None:
        """
        Migrates a template config loaded from disk to the current schema.

        Schema 1 configs are upgraded in place to schema 1.1, promoting the
        first entry of the legacy `default_backends` list to
        `completion.model`.
        """
        # Migrate old schema
        if template_config.schema == 1:
            template_config.schema = 1.1
            if (
                template_config.default_backends is not None
                and len(template_config.default_backends) > 0
            ):
                template_config.completion.model = template_config.default_backends[0]
Loading
Loading