linting fixes
lilydu committed Dec 27, 2024
1 parent bd9b55b commit af82a22
Showing 5 changed files with 23 additions and 16 deletions.
js/packages/teams-ai/src/AI.ts (10 changes: 5 additions & 5 deletions)
@@ -238,10 +238,10 @@ export class AI<TState extends TurnState = TurnState> {
         this.defaultAction(AI.PlanReadyActionName, actions.planReady());
         this.defaultAction(AI.DoCommandActionName, actions.doCommand());
         this.defaultAction(AI.TooManyStepsActionName, actions.tooManySteps());
-        this.defaultAction(AI.SayCommandActionName, actions.sayCommand(
-            this._options.enable_feedback_loop,
-            this._options.feedback_loop_type || 'default'
-        ));
+        this.defaultAction(
+            AI.SayCommandActionName,
+            actions.sayCommand(this._options.enable_feedback_loop, this._options.feedback_loop_type || 'default')
+        );
     }

     /**
@@ -271,7 +271,7 @@ export class AI<TState extends TurnState = TurnState> {
     /**
      * @returns {boolean} Returns the feedback loop type.
      */
-    public get feedbackLoopType(): "default" | "custom" | undefined {
+    public get feedbackLoopType(): 'default' | 'custom' | undefined {
         return this._options.feedback_loop_type;
     }

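For context, a minimal sketch (not part of this commit) of how the two options the SAY registration above consumes could be supplied when constructing an app. The option names enable_feedback_loop and feedback_loop_type come straight from the diff; the surrounding model/planner setup is a typical configuration for this package, and the model name, prompt folder, and environment variable are placeholders.

import * as path from 'path';
import { ActionPlanner, Application, OpenAIModel, PromptManager, TurnState } from '@microsoft/teams-ai';

// Placeholder model/prompt wiring; adjust to your own deployment.
const model = new OpenAIModel({
    apiKey: process.env.OPENAI_KEY ?? '',
    defaultModel: 'gpt-4o'
});
const prompts = new PromptManager({ promptsFolder: path.join(__dirname, '../src/prompts') });
const planner = new ActionPlanner({ model, prompts, defaultPrompt: 'chat' });

const app = new Application<TurnState>({
    ai: {
        planner,
        enable_feedback_loop: true, // forwarded to actions.sayCommand(...)
        feedback_loop_type: 'custom' // 'default' | 'custom'; the registration above falls back to 'default'
    }
});
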
js/packages/teams-ai/src/StreamingResponse.ts (2 changes: 1 addition & 1 deletion)
@@ -170,7 +170,7 @@ export class StreamingResponse {
                         '@type': 'DigitalDocument',
                         name: citation.title || `Document #${currPos + 1}`,
                         abstract: Utilities.snippet(citation.content, 477)
-                    },
+                    }
                 };
                 currPos++;
                 this._citations.push(clientCitation);
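
For orientation, an illustrative sketch (not from this commit) of the kind of citation entry the surrounding loop assembles. Only the appearance fields are taken from the hunk above; the outer '@type' and position fields, and the literal values, are assumptions about the ClientCitation shape.

// Hypothetical fully-built entry pushed onto this._citations.
const clientCitation = {
    '@type': 'Claim', // assumed outer type
    position: '1', // assumed 1-based position derived from currPos
    appearance: {
        '@type': 'DigitalDocument',
        name: 'Document #1', // citation.title || `Document #${currPos + 1}`
        abstract: 'First ~477 characters of the cited content…' // Utilities.snippet(citation.content, 477)
    }
};
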
js/packages/teams-ai/src/actions/SayCommand.ts (15 changes: 9 additions & 6 deletions)
@@ -20,7 +20,7 @@ import { AIEntity, ClientCitation } from '../types';
  */
 export function sayCommand<TState extends TurnState = TurnState>(
     feedbackLoopEnabled: boolean = false,
-    feedbackLoopType: 'default' | 'custom' = 'default',
+    feedbackLoopType: 'default' | 'custom' = 'default'
 ) {
     return async (context: TurnContext, _state: TState, data: PredictedSayCommand) => {
         if (!data.response?.content) {
@@ -58,11 +58,14 @@ export function sayCommand<TState extends TurnState = TurnState>(

         // If there are citations, filter out the citations unused in content.
         const referencedCitations = citations ? Utilities.getUsedCitations(contentText, citations) : undefined;
-        const channelData = feedbackLoopEnabled && feedbackLoopType ? {
-            feedbackLoop: {
-                type: feedbackLoopType
-            }
-        } : { feedbackLoopEnabled };
+        const channelData =
+            feedbackLoopEnabled && feedbackLoopType
+                ? {
+                      feedbackLoop: {
+                          type: feedbackLoopType
+                      }
+                  }
+                : { feedbackLoopEnabled };

         const entities: AIEntity[] = [
             {
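
A standalone sketch of the channelData branch above, isolated so the two possible shapes are easy to see; the helper name buildChannelData is hypothetical and not part of the library.

function buildChannelData(feedbackLoopEnabled: boolean, feedbackLoopType?: 'default' | 'custom') {
    return feedbackLoopEnabled && feedbackLoopType
        ? { feedbackLoop: { type: feedbackLoopType } }
        : { feedbackLoopEnabled };
}

buildChannelData(true, 'custom'); // => { feedbackLoop: { type: 'custom' } }
buildChannelData(false); // => { feedbackLoopEnabled: false }
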
js/packages/teams-ai/src/models/OpenAIModel.ts (10 changes: 7 additions & 3 deletions)
@@ -356,7 +356,7 @@ export class OpenAIModel implements PromptCompletionModel {

         // Check for tools augmentation
         const isToolsAugmentation =
-                template.config.augmentation && template.config.augmentation?.augmentation_type == 'tools';
+            template.config.augmentation && template.config.augmentation?.augmentation_type == 'tools';

         // Call chat completion API
         let message: Message<string>;
@@ -392,7 +392,11 @@ export class OpenAIModel implements PromptCompletionModel {
                         // - Note that a single tool call can span multiple chunks.
                         const index = toolCall.index;
                         if (index >= message.action_calls.length) {
-                            message.action_calls.push({ id: '', function: { name: '', arguments: '' }, type: '' } as any);
+                            message.action_calls.push({
+                                id: '',
+                                function: { name: '', arguments: '' },
+                                type: ''
+                            } as any);
                         }

                         // Set ID if provided
@@ -412,7 +416,7 @@

                         // Append function arguments if provided
                         if (toolCall.function?.arguments) {
-                                message.action_calls[index].function.arguments += toolCall.function.arguments;
+                            message.action_calls[index].function.arguments += toolCall.function.arguments;
                         }
                     }
                 }
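
A self-contained sketch of the accumulation pattern shown in the hunks above: streamed tool-call deltas arrive keyed by index, a single call can span multiple chunks, and argument text is appended rather than overwritten. The type names and the name-merging step are assumptions, not the library's actual types.

interface ToolCallDelta {
    index: number;
    id?: string;
    type?: string;
    function?: { name?: string; arguments?: string };
}

interface ActionCall {
    id: string;
    type: string;
    function: { name: string; arguments: string };
}

function mergeToolCallDelta(actionCalls: ActionCall[], delta: ToolCallDelta): void {
    // Grow the array on demand; a single tool call can span multiple chunks.
    if (delta.index >= actionCalls.length) {
        actionCalls.push({ id: '', type: '', function: { name: '', arguments: '' } });
    }

    const call = actionCalls[delta.index];
    if (delta.id) {
        call.id = delta.id;
    }
    if (delta.type) {
        call.type = delta.type;
    }
    if (delta.function?.name) {
        call.function.name = delta.function.name;
    }
    // Argument text streams incrementally, so append rather than overwrite.
    if (delta.function?.arguments) {
        call.function.arguments += delta.function.arguments;
    }
}
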
js/packages/teams-ai/src/planners/LLMClient.ts (2 changes: 1 addition & 1 deletion)
@@ -378,7 +378,7 @@ export class LLMClient<TContent = any> {
         // - We need to keep the streamer around during tool calls so we're just letting them return as normal
         //   messages minus the message content. The text content is being streamed to the client in chunks.
         // - When the tool call completes we'll call back into ActionPlanner and end up re-attaching to the
-            //   streamer. This will result in us continuing to stream the response to the client.
+        //   streamer. This will result in us continuing to stream the response to the client.
         if (Array.isArray(response.message?.action_calls)) {
             // Ensure content is empty for tool calls
             response.message!.content = '' as TContent;
