Skip to content

Commit 6d37956

Browse files
committed
fix(merge): merge PR ericc-ch#119 gpt-5-codex responses API
Merged changes from ericc-ch#119:
- Added new /responses endpoint with advanced model support
- Implemented streaming reliability with periodic pings
- Added model-specific configuration system with disk persistence
- Refactored Anthropic API compatibility layer
- Resolved merge conflict in src/start.ts (kept timeout comments)
2 parents e884ce8 + f1ba9cf commit 6d37956

16 files changed

+2281
-9
lines changed

README.md

Lines changed: 6 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -185,11 +185,12 @@ The server exposes several endpoints to interact with the Copilot API. It provid
185185

186186
These endpoints mimic the OpenAI API structure.
187187

188-
| Endpoint | Method | Description |
189-
| --------------------------- | ------ | --------------------------------------------------------- |
190-
| `POST /v1/chat/completions` | `POST` | Creates a model response for the given chat conversation. |
191-
| `GET /v1/models` | `GET` | Lists the currently available models. |
192-
| `POST /v1/embeddings` | `POST` | Creates an embedding vector representing the input text. |
188+
| Endpoint | Method | Description |
189+
| --------------------------- | ------ | ---------------------------------------------------------------- |
190+
| `POST /v1/responses` | `POST` | Most advanced interface for generating model responses. |
191+
| `POST /v1/chat/completions` | `POST` | Creates a model response for the given chat conversation. |
192+
| `GET /v1/models` | `GET` | Lists the currently available models. |
193+
| `POST /v1/embeddings` | `POST` | Creates an embedding vector representing the input text. |
193194

194195
### Anthropic Compatible Endpoints
195196

src/lib/config.ts

Lines changed: 80 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,80 @@
1+
import consola from "consola"
2+
import fs from "node:fs"
3+
4+
import { PATHS } from "./paths"
5+
6+
/** Shape of the JSON config file persisted on disk (see PATHS.CONFIG_PATH). */
export interface AppConfig {
  // Extra system-prompt text appended per model, keyed by model id.
  extraPrompts?: Record<string, string>
}

// Seed configuration written to disk when no config file exists yet.
const defaultConfig: AppConfig = {
  extraPrompts: {
    "gpt-5-codex": `
## Tool use
- You have access to many tools. If a tool exists to perform a specific task, you MUST use that tool instead of running a terminal command to perform that task.
### Bash tool
When using the Bash tool, follow these rules:
- always run_in_background set to false, unless you are running a long-running command (e.g., a server or a watch command).
### BashOutput tool
When using the BashOutput tool, follow these rules:
- Only Bash Tool run_in_background set to true, Use BashOutput to read the output later
### TodoWrite tool
When using the TodoWrite tool, follow these rules:
- Skip using the TodoWrite tool for tasks with three or fewer steps.
- Do not make single-step todo lists.
- When you made a todo, update it after having performed one of the sub-tasks that you shared on the todo list.
## Special user requests
- If the user makes a simple request (such as asking for the time) which you can fulfill by running a terminal command (such as 'date'), you should do so.
`,
  },
}

// Process-lifetime cache; populated lazily by getConfig().
let cachedConfig: AppConfig | null = null
33+
34+
function ensureConfigFile(): void {
35+
try {
36+
fs.accessSync(PATHS.CONFIG_PATH, fs.constants.R_OK | fs.constants.W_OK)
37+
} catch {
38+
fs.writeFileSync(
39+
PATHS.CONFIG_PATH,
40+
`${JSON.stringify(defaultConfig, null, 2)}\n`,
41+
"utf8",
42+
)
43+
try {
44+
fs.chmodSync(PATHS.CONFIG_PATH, 0o600)
45+
} catch {
46+
return
47+
}
48+
}
49+
}
50+
51+
function readConfigFromDisk(): AppConfig {
52+
ensureConfigFile()
53+
try {
54+
const raw = fs.readFileSync(PATHS.CONFIG_PATH, "utf8")
55+
if (!raw.trim()) {
56+
fs.writeFileSync(
57+
PATHS.CONFIG_PATH,
58+
`${JSON.stringify(defaultConfig, null, 2)}\n`,
59+
"utf8",
60+
)
61+
return defaultConfig
62+
}
63+
return JSON.parse(raw) as AppConfig
64+
} catch (error) {
65+
consola.error("Failed to read config file, using default config", error)
66+
return defaultConfig
67+
}
68+
}
69+
70+
export function getConfig(): AppConfig {
71+
if (!cachedConfig) {
72+
cachedConfig = readConfigFromDisk()
73+
}
74+
return cachedConfig
75+
}
76+
77+
export function getExtraPromptForModel(model: string): string {
78+
const config = getConfig()
79+
return config.extraPrompts?.[model] ?? ""
80+
}

src/lib/paths.ts

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -5,15 +5,18 @@ import path from "node:path"
55
// Application data directory and the well-known files stored inside it.
const APP_DIR = path.join(os.homedir(), ".local", "share", "copilot-api")

const GITHUB_TOKEN_PATH = path.join(APP_DIR, "github_token")
const CONFIG_PATH = path.join(APP_DIR, "config.json")

/** Canonical filesystem locations used across the app. */
export const PATHS = {
  APP_DIR,
  GITHUB_TOKEN_PATH,
  CONFIG_PATH,
}
1315

1416
/** Create the app data directory and make sure its well-known files exist. */
export async function ensurePaths(): Promise<void> {
  // The directory must exist before the files inside it are touched.
  await fs.mkdir(PATHS.APP_DIR, { recursive: true })
  for (const filePath of [PATHS.GITHUB_TOKEN_PATH, PATHS.CONFIG_PATH]) {
    await ensureFile(filePath)
  }
}
1821

1922
async function ensureFile(filePath: string): Promise<void> {

src/routes/messages/anthropic-types.ts

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -56,6 +56,7 @@ export interface AnthropicToolUseBlock {
5656
/**
 * Extended-thinking content block in an Anthropic message.
 * `signature` is an opaque token carried alongside the thinking text —
 * NOTE(review): presumably used by the upstream API to validate the block
 * when echoed back; confirm against the Anthropic Messages API docs.
 */
export interface AnthropicThinkingBlock {
  type: "thinking"
  thinking: string
  signature: string
}
6061

6162
export type AnthropicUserContentBlock =

src/routes/messages/handler.ts

Lines changed: 121 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -6,11 +6,25 @@ import { streamSSE } from "hono/streaming"
66
import { awaitApproval } from "~/lib/approval"
77
import { checkRateLimit } from "~/lib/rate-limit"
88
import { state } from "~/lib/state"
9+
import {
10+
createResponsesStreamState,
11+
translateResponsesStreamEvent,
12+
} from "~/routes/messages/responses-stream-translation"
13+
import {
14+
translateAnthropicMessagesToResponsesPayload,
15+
translateResponsesResultToAnthropic,
16+
} from "~/routes/messages/responses-translation"
17+
import { getResponsesRequestOptions } from "~/routes/responses/utils"
918
import {
1019
createChatCompletions,
1120
type ChatCompletionChunk,
1221
type ChatCompletionResponse,
1322
} from "~/services/copilot/create-chat-completions"
23+
import {
24+
createResponses,
25+
type ResponsesResult,
26+
type ResponseStreamEvent,
27+
} from "~/services/copilot/create-responses"
1428

1529
import {
1630
type AnthropicMessagesPayload,
@@ -28,16 +42,31 @@ export async function handleCompletion(c: Context) {
2842
const anthropicPayload = await c.req.json<AnthropicMessagesPayload>()
2943
consola.debug("Anthropic request payload:", JSON.stringify(anthropicPayload))
3044

45+
const useResponsesApi = shouldUseResponsesApi(anthropicPayload.model)
46+
47+
if (state.manualApprove) {
48+
await awaitApproval()
49+
}
50+
51+
if (useResponsesApi) {
52+
return await handleWithResponsesApi(c, anthropicPayload)
53+
}
54+
55+
return await handleWithChatCompletions(c, anthropicPayload)
56+
}
57+
58+
const RESPONSES_ENDPOINT = "/responses"
59+
60+
const handleWithChatCompletions = async (
61+
c: Context,
62+
anthropicPayload: AnthropicMessagesPayload,
63+
) => {
3164
const openAIPayload = translateToOpenAI(anthropicPayload)
3265
consola.debug(
3366
"Translated OpenAI request payload:",
3467
JSON.stringify(openAIPayload),
3568
)
3669

37-
if (state.manualApprove) {
38-
await awaitApproval()
39-
}
40-
4170
const response = await createChatCompletions(openAIPayload)
4271

4372
if (isNonStreaming(response)) {
@@ -86,6 +115,94 @@ export async function handleCompletion(c: Context) {
86115
})
87116
}
88117

118+
const handleWithResponsesApi = async (
119+
c: Context,
120+
anthropicPayload: AnthropicMessagesPayload,
121+
) => {
122+
const responsesPayload =
123+
translateAnthropicMessagesToResponsesPayload(anthropicPayload)
124+
consola.debug(
125+
"Translated Responses payload:",
126+
JSON.stringify(responsesPayload),
127+
)
128+
129+
const { vision, initiator } = getResponsesRequestOptions(responsesPayload)
130+
const response = await createResponses(responsesPayload, {
131+
vision,
132+
initiator,
133+
})
134+
135+
if (responsesPayload.stream && isAsyncIterable(response)) {
136+
consola.debug("Streaming response from Copilot (Responses API)")
137+
return streamSSE(c, async (stream) => {
138+
const streamState = createResponsesStreamState()
139+
140+
for await (const chunk of response) {
141+
const eventName = chunk.event
142+
if (eventName === "ping") {
143+
await stream.writeSSE({ event: "ping", data: "" })
144+
continue
145+
}
146+
147+
const data = chunk.data
148+
if (!data) {
149+
continue
150+
}
151+
152+
consola.debug("Responses raw stream event:", data)
153+
154+
const events = translateResponsesStreamEvent(
155+
JSON.parse(data) as ResponseStreamEvent,
156+
streamState,
157+
)
158+
for (const event of events) {
159+
const eventData = JSON.stringify(event)
160+
consola.debug("Translated Anthropic event:", eventData)
161+
await stream.writeSSE({
162+
event: event.type,
163+
data: eventData,
164+
})
165+
}
166+
}
167+
168+
if (!streamState.messageCompleted) {
169+
consola.warn(
170+
"Responses stream ended without completion; sending fallback message_stop",
171+
)
172+
const fallback = { type: "message_stop" as const }
173+
await stream.writeSSE({
174+
event: fallback.type,
175+
data: JSON.stringify(fallback),
176+
})
177+
}
178+
})
179+
}
180+
181+
consola.debug(
182+
"Non-streaming Responses result:",
183+
JSON.stringify(response).slice(-400),
184+
)
185+
const anthropicResponse = translateResponsesResultToAnthropic(
186+
response as ResponsesResult,
187+
)
188+
consola.debug(
189+
"Translated Anthropic response:",
190+
JSON.stringify(anthropicResponse),
191+
)
192+
return c.json(anthropicResponse)
193+
}
194+
195+
const shouldUseResponsesApi = (modelId: string): boolean => {
196+
const selectedModel = state.models?.data.find((model) => model.id === modelId)
197+
return (
198+
selectedModel?.supported_endpoints?.includes(RESPONSES_ENDPOINT) ?? false
199+
)
200+
}
201+
89202
const isNonStreaming = (
90203
response: Awaited<ReturnType<typeof createChatCompletions>>,
91204
): response is ChatCompletionResponse => Object.hasOwn(response, "choices")
205+
206+
const isAsyncIterable = <T>(value: unknown): value is AsyncIterable<T> =>
207+
Boolean(value)
208+
&& typeof (value as AsyncIterable<T>)[Symbol.asyncIterator] === "function"

0 commit comments

Comments
 (0)