
Commit

Merge branch 'kavin/py-assistants-mi' of https://github.com/microsoft/teams-ai into kavin/py-assistants-mi
singhk97 committed Oct 30, 2024
2 parents ab791b5 + 896488a commit 1e06064
Showing 7 changed files with 125 additions and 28 deletions.
33 changes: 32 additions & 1 deletion js/packages/teams-ai/src/StreamingResponse.spec.ts
@@ -2,6 +2,7 @@ import assert from 'assert';
import { TestAdapter } from 'botbuilder';
import { CardFactory } from 'botbuilder-core';
import { StreamingResponse } from './StreamingResponse';
import { Citation } from './prompts/Message';

describe('StreamingResponse', function () {
describe('constructor()', () => {
@@ -163,6 +164,34 @@ describe('StreamingResponse', function () {
});
});

it('should send a final message with text and citations', async () => {
const adapter = new TestAdapter();
await adapter.sendTextToBot('test', async (context) => {
const response = new StreamingResponse(context);
response.queueTextChunk('first', [
{ content: 'test-content', url: 'https://example.com', title: 'test', filepath: 'test' } as Citation
]);
response.queueTextChunk('second');
await response.waitForQueue();
await response.endStream();
assert(response.updatesSent == 2, 'updatesSent should be 2');
assert(response.citations == undefined, 'no citations matched');

// Validate sent activities
const activities = adapter.activeQueue;
assert.equal(activities.length, 3, 'should have sent 3 activities');
assert.equal(activities[0].channelData.streamSequence, 1, 'first activity streamSequence should be 1');
assert.equal(activities[1].channelData.streamSequence, 2, 'second activity streamSequence should be 2');
assert.equal(activities[2].type, 'message', 'final activity type should be "message"');
assert.equal(activities[2].text, 'firstsecond', 'final activity text should be "firstsecond"');
assert.deepEqual(
activities[2].channelData,
{ streamType: 'final', streamId: response.streamId, feedbackLoopEnabled: false },
'final activity channelData should match'
);
});
});

it('should send a final message with powered by AI features', async () => {
const adapter = new TestAdapter();
await adapter.sendTextToBot('test', async (context) => {
@@ -209,7 +238,9 @@ describe('StreamingResponse', function () {
'@type': 'Message',
'@context': 'https://schema.org',
'@id': '',
-additionalType: ['AIGeneratedContent']
+additionalType: ['AIGeneratedContent'],
+citation: [],
+usageInfo: undefined
}
],
'final activity entities obj should match'
56 changes: 53 additions & 3 deletions js/packages/teams-ai/src/StreamingResponse.ts
@@ -7,7 +7,9 @@
*/

import { Activity, Attachment, TurnContext, Entity } from 'botbuilder-core';
-import { AIEntity } from './types';
+import { AIEntity, ClientCitation, SensitivityUsageInfo } from './types';
+import { Citation } from './prompts/Message';
+import { Utilities } from './Utilities';

/**
* A helper class for streaming responses to the client.
@@ -35,6 +37,8 @@ export class StreamingResponse {
// Powered by AI feature flags
private _enableFeedbackLoop = false;
private _enableGeneratedByAILabel = false;
private _citations?: ClientCitation[] = [];
private _sensitivityLabel?: SensitivityUsageInfo;

/**
* Creates a new StreamingResponse instance.
@@ -55,6 +59,13 @@
return this._streamId;
}

/**
* Gets the citations of the current response.
*/
public get citations(): ClientCitation[] | undefined {
return this._citations;
}

/**
* Gets the number of updates sent for the stream.
* @returns {number} - The number of updates sent for the stream.
@@ -89,15 +100,44 @@
* The text will be sent as quickly as possible to the client. Chunks may be combined before
* delivery to the client.
* @param {string} text Partial text of the message to send.
* @param {Citation[]} citations Citations to be included in the message.
*/
-public queueTextChunk(text: string): void {
+public queueTextChunk(text: string, citations?: Citation[]): void {
if (this._ended) {
throw new Error('The stream has already ended.');
}

// Update full message text
this._message += text;

if (citations && citations.length > 0) {
if (!this._citations) {
this._citations = [];
}
let currPos = this._citations.length;

for (const citation of citations) {
const clientCitation: ClientCitation = {
'@type': 'Claim',
position: `${currPos + 1}`,
appearance: {
'@type': 'DigitalDocument',
name: citation.title || `Document #${currPos + 1}`,
abstract: Utilities.snippet(citation.content, 477)
}
};
currPos++;
this._citations.push(clientCitation);
}

// If there are citations, modify the content so that the sources are numbers instead of [doc1], [doc2], etc.
this._message =
this._citations.length == 0 ? this._message : Utilities.formatCitationsResponse(this._message);

// If there are citations, filter out the citations unused in content.
this._citations = Utilities.getUsedCitations(this._message, this._citations) ?? undefined;
}

// Queue the next chunk
this.queueNextChunk();
}
@@ -127,6 +167,14 @@
this._attachments = attachments;
}

/**
* Sets the sensitivity label to attach to the final chunk.
* @param sensitivityLabel The sensitivity label.
*/
public setSensitivityLabel(sensitivityLabel: SensitivityUsageInfo): void {
this._sensitivityLabel = sensitivityLabel;
}

/**
* Sets the Feedback Loop in Teams that allows a user to
* give thumbs up or down to a response.
@@ -279,7 +327,9 @@
'@type': 'Message',
'@context': 'https://schema.org',
'@id': '',
-additionalType: ['AIGeneratedContent']
+additionalType: ['AIGeneratedContent'],
+citation: this._citations && this._citations.length > 0 ? this._citations : [],
+usageInfo: this._sensitivityLabel
} as AIEntity);
}
}
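
For illustration, here is a minimal usage sketch of the citation and sensitivity-label support added above. The handler name, the citation values, and the assumption that the snippet lives alongside StreamingResponse.ts are hypothetical; only the StreamingResponse API calls come from this change.

import { TurnContext } from 'botbuilder-core';
import { StreamingResponse } from './StreamingResponse';
import { Citation } from './prompts/Message';
import { SensitivityUsageInfo } from './types';

// Hypothetical handler: stream text chunks with their backing citations, then end the stream.
export async function streamWithCitations(context: TurnContext, label?: SensitivityUsageInfo): Promise<void> {
    const response = new StreamingResponse(context);

    // Citations queued with a chunk become numbered ClientCitations, and [doc1]-style
    // markers in the text are rewritten to match; unreferenced citations are dropped.
    const citations: Citation[] = [
        { content: 'Relevant source passage', title: 'Source doc', url: 'https://example.com', filepath: 'docs/source.md' }
    ];
    response.queueTextChunk('Answer grounded in the source [doc1].', citations);
    response.queueTextChunk(' Additional detail without citations.');

    if (label) {
        // An optional sensitivity label is attached to the final chunk as usageInfo.
        response.setSensitivityLabel(label);
    }

    await response.waitForQueue();
    await response.endStream();
}

The final activity sent by endStream() then carries any surviving citations and the usage info on its message entity, which is what the new StreamingResponse.spec.ts test above checks for citations.
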
28 changes: 15 additions & 13 deletions js/packages/teams-ai/src/models/OpenAIModel.spec.ts
@@ -98,21 +98,23 @@ describe('OpenAIModel', () => {
});

const mockResponse = {
-choices: [{
-    message: {
-        role: 'assistant',
-        content: 'Test response',
-        context: {
-            citations: [
-                {
-                    content: 'Citation content',
-                    title: 'Citation title',
-                    url: 'https://citation.url'
-                }
-            ]
+choices: [
+    {
+        message: {
+            role: 'assistant',
+            content: 'Test response',
+            context: {
+                citations: [
+                    {
+                        content: 'Citation content',
+                        title: 'Citation title',
+                        url: 'https://citation.url'
+                    }
+                ]
+            }
        }
    }
-}]
+]
};

// Mock the API call
@@ -119,10 +119,12 @@ describe('AzureContentSafetyModerator', () => {
status: '200',
statusText: 'OK',
data: {
-categoriesAnalysis: [{
-    category: 'Hate',
-    severity: 1
-}]
+categoriesAnalysis: [
+    {
+        category: 'Hate',
+        severity: 1
+    }
+]
}
})
);
@@ -170,10 +172,12 @@
status: '200',
statusText: 'OK',
data: {
-categoriesAnalysis: [{
-    category: 'Hate',
-    severity: 7
-}]
+categoriesAnalysis: [
+    {
+        category: 'Hate',
+        severity: 7
+    }
+]
}
})
);
@@ -45,7 +45,7 @@ export interface AzureOpenAIModeratorOptions extends OpenAIModeratorOptions {
/**
* @deprecated
* use `haltOnBlocklistHit`
*
*
* When set to true, further analyses of harmful content will not be performed in cases where blocklists are hit.
* When set to false, all analyses of harmful content will be performed, whether or not blocklists are hit.
* Default value is false.
@@ -160,9 +160,11 @@ export class AzureContentSafetyModerator<TState extends TurnState = TurnState> e

const predicate = (category: AzureOpenAIModeratorCategory) => {
return (c: ContentSafetyHarmCategory) => {
-return c.category === category &&
+return (
+    c.category === category &&
    c.severity > 0 &&
    c.severity <= this._azureContentSafetyCategories[category].severity
+);
};
};

4 changes: 3 additions & 1 deletion js/packages/teams-ai/src/planners/LLMClient.ts
@@ -327,8 +327,10 @@ export class LLMClient<TContent = any> {

// Send chunk to client
const text = chunk.delta?.content ?? '';
const citations = chunk.delta?.context?.citations ?? undefined;

if (text.length > 0) {
-streamer.queueTextChunk(text);
+streamer.queueTextChunk(text, citations);
}
};

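
For reference, a rough sketch of the streamed chunk shape this handler relies on when citations flow through. The literal values are illustrative, mirroring the mocked response in OpenAIModel.spec.ts rather than captured model output.

// Hypothetical chunk as seen by LLMClient's streaming handler.
const chunk = {
    delta: {
        content: 'Grounded answer [doc1]',
        context: {
            citations: [{ content: 'Citation content', title: 'Citation title', url: 'https://citation.url' }]
        }
    }
};

// Same extraction as in the diff above: the text and its citations travel together.
const text = chunk.delta?.content ?? '';
const citations = chunk.delta?.context?.citations ?? undefined;
// streamer.queueTextChunk(text, citations) then hands both to StreamingResponse.
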
6 changes: 6 additions & 0 deletions js/packages/teams-ai/src/types/AIEntity.ts
@@ -7,6 +7,7 @@
*/

import { ClientCitation } from './ClientCitation';
import { SensitivityUsageInfo } from './SensitivityUsageInfo';

export interface AIEntity {
/**
@@ -38,4 +39,9 @@ export interface AIEntity {
* Optional; if citations object is included, the sent activity will include the citations, referenced in the activity text.
*/
citation?: ClientCitation[];

/**
* Optional; if usage_info object is included, the sent activity will include the sensitivity usage information.
*/
usageInfo?: SensitivityUsageInfo;
}
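
To show how the new fields surface, a small sketch of an entity carrying one citation; the values are placeholders, and the object shapes follow what this diff builds in StreamingResponse.ts (import paths assume the snippet sits in the same types folder).

import { AIEntity } from './AIEntity';
import { ClientCitation } from './ClientCitation';

// Hypothetical citation as StreamingResponse constructs it from a queued Citation.
const exampleCitation: ClientCitation = {
    '@type': 'Claim',
    position: '1',
    appearance: {
        '@type': 'DigitalDocument',
        name: 'Source doc',
        abstract: 'Snippet of the cited content'
    }
};

// Fragment of the final-message entity: numbered citation attached, no sensitivity label set.
const exampleEntity: Partial<AIEntity> = {
    additionalType: ['AIGeneratedContent'],
    citation: [exampleCitation],
    usageInfo: undefined // populated when setSensitivityLabel() is used on the stream
};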
