
Commit

Merge pull request #1040 from microsoft/isidorn/heavy-pig
error handling in chat sample
isidorn authored May 31, 2024
2 parents 7c34798 + f6fd821 commit 1071c55
Showing 1 changed file with 66 additions and 42 deletions.
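
In short, the commit wraps each call to the language model in a try/catch and routes failures to a shared handleError helper instead of letting them escape the chat handler. Below is a minimal sketch of the resulting pattern, condensed from the diff that follows; the respond helper and the selector value are illustrative stand-ins (the sample defines its own MODEL_SELECTOR and command handlers), while the vscode.lm calls and handleError mirror the sample.

import * as vscode from 'vscode';

// Illustrative selector; the sample defines its own MODEL_SELECTOR constant.
const MODEL_SELECTOR: vscode.LanguageModelChatSelector = { vendor: 'copilot' };

// Hypothetical helper showing the pattern the commit applies to each command branch.
async function respond(prompt: string, stream: vscode.ChatResponseStream, token: vscode.CancellationToken): Promise<void> {
    try {
        // Selecting the model and sending the request can both throw, so both sit inside the try block.
        const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR);
        if (model) {
            const messages = [vscode.LanguageModelChatMessage.User(prompt)];
            const chatResponse = await model.sendRequest(messages, {}, token);
            for await (const fragment of chatResponse.text) {
                stream.markdown(fragment);
            }
        }
    } catch (err) {
        handleError(err, stream);
    }
}

function handleError(err: any, stream: vscode.ChatResponseStream): void {
    // The request may fail because the model does not exist, the user did not
    // give consent, or quota limits were exceeded.
    if (err instanceof vscode.LanguageModelError) {
        console.log(err.message, err.code, err.cause);
        if (err.cause instanceof Error && err.cause.message.includes('off_topic')) {
            stream.markdown(vscode.l10n.t('I\'m sorry, I can only explain computer science concepts.'));
        }
    }
}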
108 changes: 66 additions & 42 deletions chat-sample/src/extension.ts
@@ -23,16 +23,21 @@ export function activate(context: vscode.ExtensionContext) {
        if (request.command == 'teach') {
            stream.progress('Picking the right topic to teach...');
            const topic = getTopic(context.history);
-            const messages = [
-                vscode.LanguageModelChatMessage.User('You are a cat! Your job is to explain computer science concepts in the funny manner of a cat. Always start your response by stating what concept you are explaining. Always include code samples.'),
-                vscode.LanguageModelChatMessage.User(topic)
-            ];
-            const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR);
-            if (model) {
-                const chatResponse = await model.sendRequest(messages, {}, token);
-                for await (const fragment of chatResponse.text) {
-                    stream.markdown(fragment);
-                }
-            }
+            try {
+                const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR);
+                if (model) {
+                    const messages = [
+                        vscode.LanguageModelChatMessage.User('You are a cat! Your job is to explain computer science concepts in the funny manner of a cat. Always start your response by stating what concept you are explaining. Always include code samples.'),
+                        vscode.LanguageModelChatMessage.User(topic)
+                    ];
+
+                    const chatResponse = await model.sendRequest(messages, {}, token);
+                    for await (const fragment of chatResponse.text) {
+                        stream.markdown(fragment);
+                    }
+                }
+            } catch(err) {
+                handleError(err, stream);
+            }

            stream.button({
@@ -43,36 +48,46 @@ export function activate(context: vscode.ExtensionContext) {
            return { metadata: { command: 'teach' } };
        } else if (request.command == 'play') {
            stream.progress('Throwing away the computer science books and preparing to play with some Python code...');
-            const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR);
-            if (model) {
-                // Here's an example of how to use the prompt-tsx library to build a prompt
-                const { messages } = await renderPrompt(
-                    PlayPrompt,
-                    { userQuery: request.prompt },
-                    { modelMaxPromptTokens: model.maxInputTokens },
-                    new Cl100KBaseTokenizer());
-                const chatResponse = await model.sendRequest(messages, {}, token);
-                for await (const fragment of chatResponse.text) {
-                    stream.markdown(fragment);
-                }
-            }
+            try {
+                const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR);
+                if (model) {
+                    // Here's an example of how to use the prompt-tsx library to build a prompt
+                    const { messages } = await renderPrompt(
+                        PlayPrompt,
+                        { userQuery: request.prompt },
+                        { modelMaxPromptTokens: model.maxInputTokens },
+                        new Cl100KBaseTokenizer());
+
+                    const chatResponse = await model.sendRequest(messages, {}, token);
+                    for await (const fragment of chatResponse.text) {
+                        stream.markdown(fragment);
+                    }
+                }
+            } catch(err) {
+                handleError(err, stream);
+            }

            return { metadata: { command: 'play' } };
        } else {
-            const messages = [
-                vscode.LanguageModelChatMessage.User(`You are a cat! Think carefully and step by step like a cat would.
-                Your job is to explain computer science concepts in the funny manner of a cat, using cat metaphors. Always start your response by stating what concept you are explaining. Always include code samples.`),
-                vscode.LanguageModelChatMessage.User(request.prompt)
-            ];
-            const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR);
-            if (model) {
-                const chatResponse = await model.sendRequest(messages, {}, token);
-                for await (const fragment of chatResponse.text) {
-                    // Process the output from the language model
-                    // Replace all python function definitions with cat sounds to make the user stop looking at the code and start playing with the cat
-                    const catFragment = fragment.replaceAll('def', 'meow');
-                    stream.markdown(catFragment);
-                }
-            }
+            try {
+                const [model] = await vscode.lm.selectChatModels(MODEL_SELECTOR);
+                if (model) {
+                    const messages = [
+                        vscode.LanguageModelChatMessage.User(`You are a cat! Think carefully and step by step like a cat would.
+                        Your job is to explain computer science concepts in the funny manner of a cat, using cat metaphors. Always start your response by stating what concept you are explaining. Always include code samples.`),
+                        vscode.LanguageModelChatMessage.User(request.prompt)
+                    ];
+
+                    const chatResponse = await model.sendRequest(messages, {}, token);
+                    for await (const fragment of chatResponse.text) {
+                        // Process the output from the language model
+                        // Replace all python function definitions with cat sounds to make the user stop looking at the code and start playing with the cat
+                        const catFragment = fragment.replaceAll('def', 'meow');
+                        stream.markdown(catFragment);
+                    }
+                }
+            } catch(err) {
+                handleError(err, stream);
+            }

            return { metadata: { command: '' } };
@@ -100,11 +115,6 @@ export function activate(context: vscode.ExtensionContext) {
        vscode.commands.registerTextEditorCommand(CAT_NAMES_COMMAND_ID, async (textEditor: vscode.TextEditor) => {
            // Replace all variables in active editor with cat names and words
            const text = textEditor.document.getText();
-            const messages = [
-                vscode.LanguageModelChatMessage.User(`You are a cat! Think carefully and step by step like a cat would.
-                Your job is to replace all variable names in the following code with funny cat variable names. Be creative. IMPORTANT respond just with code. Do not use markdown!`),
-                vscode.LanguageModelChatMessage.User(text)
-            ];

            let chatResponse: vscode.LanguageModelChatResponse | undefined;
            try {
@@ -114,13 +124,14 @@ export function activate(context: vscode.ExtensionContext) {
                    return;
                }

+                const messages = [
+                    vscode.LanguageModelChatMessage.User(`You are a cat! Think carefully and step by step like a cat would.
+                    Your job is to replace all variable names in the following code with funny cat variable names. Be creative. IMPORTANT respond just with code. Do not use markdown!`),
+                    vscode.LanguageModelChatMessage.User(text)
+                ];
                chatResponse = await model.sendRequest(messages, {}, new vscode.CancellationTokenSource().token);

            } catch (err) {
-                // making the chat request might fail because
-                // - model does not exist
-                // - user consent not given
-                // - quota limits exceeded
                if (err instanceof vscode.LanguageModelError) {
                    console.log(err.message, err.code, err.cause)
                }
@@ -155,6 +166,19 @@ export function activate(context: vscode.ExtensionContext) {
    );
}

+function handleError(err: any, stream: vscode.ChatResponseStream): void {
+    // making the chat request might fail because
+    // - model does not exist
+    // - user consent not given
+    // - quota limits exceeded
+    if (err instanceof vscode.LanguageModelError) {
+        console.log(err.message, err.code, err.cause);
+        if (err.cause instanceof Error && err.cause.message.includes('off_topic')) {
+            stream.markdown(vscode.l10n.t('I\'m sorry, I can only explain computer science concepts.'));
+        }
+    }
+}
+
// Get a random topic that the cat has not taught in the chat history yet
function getTopic(history: ReadonlyArray<vscode.ChatRequestTurn | vscode.ChatResponseTurn>): string {
    const topics = ['linked list', 'recursion', 'stack', 'queue', 'pointers'];