@@ -13,6 +13,8 @@ import { HarmBlockThreshold, HarmCategory } from "@google/generative-ai";
 import { ChatAnthropic } from "@langchain/anthropic";
 import { ChatCohere } from "@langchain/cohere";
 import { BaseChatModel } from "@langchain/core/language_models/chat_models";
+import { AIMessage } from "@langchain/core/messages";
+import { Runnable } from "@langchain/core/runnables";
 import { ChatDeepSeek } from "@langchain/deepseek";
 import { ChatGoogleGenerativeAI } from "@langchain/google-genai";
 import { ChatGroq } from "@langchain/groq";
@@ -21,11 +23,63 @@ import { ChatOllama } from "@langchain/ollama";
 import { ChatOpenAI } from "@langchain/openai";
 import { ChatXAI } from "@langchain/xai";
 import { Notice } from "obsidian";
+import { GitHubCopilotProvider } from "./githubCopilotProvider";
+import { ChatPromptValue } from "@langchain/core/prompt_values";
+
+class CopilotRunnable extends Runnable {
+  lc_serializable = false;
+  lc_namespace = ["langchain", "chat_models", "copilot"];
+  private provider: GitHubCopilotProvider;
+  private modelName: string;
+
+  constructor(provider: GitHubCopilotProvider, modelName: string) {
+    super();
+    this.provider = provider;
+    this.modelName = modelName;
+  }
+
+  async invoke(input: ChatPromptValue, options?: any): Promise<any> {
+    const messages = input.toChatMessages().map((m) => ({
+      role: m._getType() === "human" ? "user" : "assistant",
+      content: m.content as string,
+    }));
+    const response = await this.provider.sendChatMessage(messages, this.modelName);
+    const content = response.choices?.[0]?.message?.content || "";
+    return new AIMessage(content);
+  }
+}
 
 type ChatConstructorType = {
   new (config: any): any;
 };
 
+// Placeholder for GitHub Copilot chat provider
+class ChatGitHubCopilot {
+  private provider: GitHubCopilotProvider;
+  constructor(config: any) {
+    this.provider = new GitHubCopilotProvider();
+    // TODO: Use config for persistent storage, UI callbacks, etc.
+  }
+  async send(messages: { role: string; content: string }[], model = "gpt-4") {
+    return this.provider.sendChatMessage(messages, model);
+  }
+  getAuthState() {
+    return this.provider.getAuthState();
+  }
+  async startAuth() {
+    return this.provider.startDeviceCodeFlow();
+  }
+  async pollForAccessToken() {
+    return this.provider.pollForAccessToken();
+  }
+  async fetchCopilotToken() {
+    return this.provider.fetchCopilotToken();
+  }
+  resetAuth() {
+    this.provider.resetAuth();
+  }
+}
+
 const CHAT_PROVIDER_CONSTRUCTORS = {
   [ChatModelProviders.OPENAI]: ChatOpenAI,
   [ChatModelProviders.AZURE_OPENAI]: ChatOpenAI,
@@ -41,6 +95,7 @@ const CHAT_PROVIDER_CONSTRUCTORS = {
   [ChatModelProviders.COPILOT_PLUS]: ChatOpenAI,
   [ChatModelProviders.MISTRAL]: ChatMistralAI,
   [ChatModelProviders.DEEPSEEK]: ChatDeepSeek,
+  [ChatModelProviders.GITHUB_COPILOT]: ChatGitHubCopilot, // Register GitHub Copilot
 } as const;
 
 type ChatProviderConstructMap = typeof CHAT_PROVIDER_CONSTRUCTORS;
@@ -72,6 +127,7 @@ export default class ChatModelManager {
     [ChatModelProviders.COPILOT_PLUS]: () => getSettings().plusLicenseKey,
     [ChatModelProviders.MISTRAL]: () => getSettings().mistralApiKey,
     [ChatModelProviders.DEEPSEEK]: () => getSettings().deepseekApiKey,
+    [ChatModelProviders.GITHUB_COPILOT]: () => "", // Placeholder for GitHub Copilot
   } as const;
 
   private constructor() {
@@ -97,10 +153,16 @@ export default class ChatModelManager {
     const isThinkingEnabled =
       modelName.startsWith("claude-3-7-sonnet") || modelName.startsWith("claude-sonnet-4");
 
+    // For GitHub Copilot, streaming is not supported
+    const streaming =
+      customModel.provider === ChatModelProviders.GITHUB_COPILOT
+        ? false
+        : (customModel.stream ?? true);
+
     // Base config without temperature when thinking is enabled
     const baseConfig: Omit<ModelConfig, "maxTokens" | "maxCompletionTokens" | "temperature"> = {
       modelName: modelName,
-      streaming: customModel.stream ?? true,
+      streaming,
       maxRetries: 3,
       maxConcurrency: 3,
       enableCors: customModel.enableCors,
@@ -250,6 +312,7 @@ export default class ChatModelManager {
           fetch: customModel.enableCors ? safeFetch : undefined,
         },
       },
+      [ChatModelProviders.GITHUB_COPILOT]: {}, // Placeholder config for GitHub Copilot
     };
 
     const selectedProviderConfig =
@@ -344,35 +407,28 @@ export default class ChatModelManager {
   }
 
   async setChatModel(model: CustomModel): Promise<void> {
-    const modelKey = getModelKeyFromModel(model);
     try {
-      const modelInstance = await this.createModelInstance(model);
-      ChatModelManager.chatModel = modelInstance;
-    } catch (error) {
-      logError(error);
-      new Notice(`Error creating model: ${modelKey}`);
+      ChatModelManager.chatModel = await this.createModelInstance(model);
+      logInfo(`Chat model set to ${model.name}`);
+    } catch (e) {
+      logError("Failed to set chat model:", e);
+      new Notice(`Failed to set chat model: ${e.message}`);
+      ChatModelManager.chatModel = null;
     }
   }
 
   async createModelInstance(model: CustomModel): Promise<BaseChatModel> {
-    // Create and return the appropriate model
-    const modelKey = getModelKeyFromModel(model);
-    const selectedModel = ChatModelManager.modelMap[modelKey];
-    if (!selectedModel) {
-      throw new Error(`No model found for: ${modelKey}`);
-    }
-    if (!selectedModel.hasApiKey) {
-      const errorMessage = `API key is not provided for the model: ${modelKey}.`;
-      new Notice(errorMessage);
-      throw new Error(errorMessage);
+    if (model.provider === ChatModelProviders.GITHUB_COPILOT) {
+      const provider = new GitHubCopilotProvider();
+      const copilotRunnable = new CopilotRunnable(provider, model.name);
+      // The type assertion is a bit of a hack, but it makes it work with the existing structure
+      return copilotRunnable as unknown as BaseChatModel;
     }
 
-    const modelConfig = await this.getModelConfig(model);
+    const AIConstructor = this.getProviderConstructor(model);
+    const config = await this.getModelConfig(model);
 
-    const newModelInstance = new selectedModel.AIConstructor({
-      ...modelConfig,
-    });
-    return newModelInstance;
+    return new AIConstructor(config);
   }
 
   validateChatModel(chatModel: BaseChatModel): boolean {
@@ -427,6 +483,19 @@ export default class ChatModelManager {
   }
 
   async ping(model: CustomModel): Promise<boolean> {
+    if (model.provider === ChatModelProviders.GITHUB_COPILOT) {
+      const provider = new GitHubCopilotProvider();
+      const state = provider.getAuthState();
+      if (state.status === "authenticated") {
+        new Notice("GitHub Copilot is authenticated.");
+        return true;
+      } else {
+        new Notice(
+          "GitHub Copilot is not authenticated. Please set it up in the 'Basic' settings tab."
+        );
+        return false;
+      }
+    }
     const tryPing = async (enableCors: boolean) => {
       const modelToTest = { ...model, enableCors };
       const modelConfig = await this.getModelConfig(modelToTest);
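
For reference, a rough usage sketch of the ChatGitHubCopilot wrapper introduced above, assuming GitHubCopilotProvider implements GitHub's standard device-code flow. Only the method names and the "authenticated" status string (used in ping()) come from this change; the user_code and verification_uri fields on the device response are assumptions.

// Sketch only: wires the placeholder ChatGitHubCopilot class end to end.
// Field names on the device-code response (user_code, verification_uri) are
// assumed; this diff defines only the method names.
async function connectAndChat(): Promise<void> {
  const copilot = new ChatGitHubCopilot({});

  const device = await copilot.startAuth();
  new Notice(`Enter ${device.user_code} at ${device.verification_uri}`);

  await copilot.pollForAccessToken(); // resolves once the user approves the device
  await copilot.fetchCopilotToken(); // exchanges the GitHub token for a Copilot token

  if (copilot.getAuthState().status === "authenticated") {
    const res = await copilot.send([{ role: "user", content: "Hello from Obsidian" }]);
    console.log(res.choices?.[0]?.message?.content);
  }
}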
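The CopilotRunnable adapter slots into a LangChain pipeline wherever a chat model is expected. Below is a minimal sketch, assuming an already-authenticated GitHubCopilotProvider; the prompt and function name are illustrative, not part of this change.

import { ChatPromptTemplate } from "@langchain/core/prompts";

// Sketch only: pipes a prompt template into the CopilotRunnable adapter.
// ChatPromptTemplate.invoke() produces a ChatPromptValue, which is the input
// type CopilotRunnable.invoke() converts into Copilot role/content messages.
async function summarizeNote(noteText: string): Promise<string> {
  const provider = new GitHubCopilotProvider(); // assumed to be authenticated already
  const copilotModel = new CopilotRunnable(provider, "gpt-4");

  const prompt = ChatPromptTemplate.fromMessages([
    ["human", "Summarize the following note:\n\n{note}"],
  ]);

  const reply = await prompt.pipe(copilotModel).invoke({ note: noteText });
  return String(reply.content); // reply is an AIMessage
}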