4 changes: 4 additions & 0 deletions packages/agent/src/agent-loop.ts
@@ -230,9 +230,13 @@ async function streamAssistantResponse(
const resolvedApiKey =
(config.getApiKey ? await config.getApiKey(config.model.provider) : undefined) || config.apiKey;

// Create custom fetch if configured
const customFetch = config.createFetch?.(config.model);

const response = await streamFunction(config.model, llmContext, {
...config,
apiKey: resolvedApiKey,
fetch: customFetch,
signal,
});

9 changes: 9 additions & 0 deletions packages/agent/src/agent.ts
@@ -71,6 +71,12 @@ export interface AgentOptions {
* Useful for expiring tokens (e.g., GitHub Copilot OAuth).
*/
getApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;

/**
* Factory to create a custom fetch function for HTTP requests.
* Use this to intercept, modify, or log HTTP requests made to the LLM provider.
*/
createFetch?: (model: Model<any>) => typeof globalThis.fetch;
}

export class Agent {
@@ -97,6 +103,7 @@ export class Agent {
public streamFn: StreamFn;
private _sessionId?: string;
public getApiKey?: (provider: string) => Promise<string | undefined> | string | undefined;
public createFetch?: (model: Model<any>) => typeof globalThis.fetch;
private runningPrompt?: Promise<void>;
private resolveRunningPrompt?: () => void;

@@ -109,6 +116,7 @@
this.streamFn = opts.streamFn || streamSimple;
this._sessionId = opts.sessionId;
this.getApiKey = opts.getApiKey;
this.createFetch = opts.createFetch;
}

/**
@@ -313,6 +321,7 @@ export class Agent {
convertToLlm: this.convertToLlm,
transformContext: this.transformContext,
getApiKey: this.getApiKey,
createFetch: this.createFetch,
getSteeringMessages: async () => {
if (this.steeringMode === "one-at-a-time") {
if (this.steeringQueue.length > 0) {
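
For illustration, a minimal consumer-side sketch — the factory receives the resolved `Model` and must return a function matching `typeof globalThis.fetch`. The import path and the elided constructor options are assumptions, not taken from this diff:

```ts
import { Agent } from "@mariozechner/pi-agent"; // import path assumed

const agent = new Agent({
  // ...model, tools, and other required AgentOptions elided
  createFetch: (model) => {
    const wrapped: typeof globalThis.fetch = async (input, init) => {
      const started = Date.now();
      const response = await globalThis.fetch(input, init);
      // model.provider and model.id come from the Model passed to the factory
      console.log(`[${model.provider}/${model.id}] ${response.status} in ${Date.now() - started}ms`);
      return response;
    };
    return wrapped;
  },
});
```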
6 changes: 6 additions & 0 deletions packages/agent/src/types.ts
@@ -22,6 +22,12 @@ export type StreamFn = (
export interface AgentLoopConfig extends SimpleStreamOptions {
model: Model<any>;

/**
* Factory to create a custom fetch function for HTTP requests.
* Use this to intercept, modify, or log HTTP requests made to the LLM provider.
*/
createFetch?: (model: Model<any>) => typeof globalThis.fetch;

/**
* Converts AgentMessage[] to LLM-compatible Message[] before each LLM call.
*
10 changes: 9 additions & 1 deletion packages/ai/src/providers/anthropic.ts
@@ -115,7 +115,12 @@ export const streamAnthropic: StreamFunction<"anthropic-messages"> = (

try {
const apiKey = options?.apiKey ?? getEnvApiKey(model.provider) ?? "";
const { client, isOAuthToken } = createClient(model, apiKey, options?.interleavedThinking ?? true);
const { client, isOAuthToken } = createClient(
model,
apiKey,
options?.interleavedThinking ?? true,
options?.fetch,
);
const params = buildParams(model, context, isOAuthToken, options);
const anthropicStream = client.messages.stream({ ...params, stream: true }, { signal: options?.signal });
stream.push({ type: "start", partial: output });
@@ -282,6 +287,7 @@ function createClient(
model: Model<"anthropic-messages">,
apiKey: string,
interleavedThinking: boolean,
customFetch?: typeof globalThis.fetch,
): { client: Anthropic; isOAuthToken: boolean } {
const betaFeatures = ["fine-grained-tool-streaming-2025-05-14"];
if (interleavedThinking) {
@@ -302,6 +308,7 @@
baseURL: model.baseUrl,
defaultHeaders,
dangerouslyAllowBrowser: true,
fetch: customFetch,
});

return { client, isOAuthToken: true };
@@ -318,6 +325,7 @@
baseURL: model.baseUrl,
dangerouslyAllowBrowser: true,
defaultHeaders,
fetch: customFetch,
});

return { client, isOAuthToken: false };
3 changes: 2 additions & 1 deletion packages/ai/src/providers/google-gemini-cli.ts
@@ -259,6 +259,7 @@ export const streamGoogleGeminiCli: StreamFunction<"google-gemini-cli"> = (
const headers = isAntigravity ? ANTIGRAVITY_HEADERS : GEMINI_CLI_HEADERS;

// Fetch with retry logic for rate limits and transient errors
const fetchFn = options?.fetch ?? globalThis.fetch;
let response: Response | undefined;
let lastError: Error | undefined;

@@ -268,7 +269,7 @@
}

try {
response = await fetch(url, {
response = await fetchFn(url, {
method: "POST",
headers: {
Authorization: `Bearer ${accessToken}`,
8 changes: 8 additions & 0 deletions packages/ai/src/providers/google.ts
@@ -1,3 +1,11 @@
/**
* Google Generative AI provider using the official @google/genai SDK.
*
* Note: The @google/genai SDK does not expose a custom fetch option in GoogleGenAIOptions.
* The SDK has internal support for custom fetch but it is not accessible via the public
* GoogleGenAI constructor. As a result, HTTP hooks will not fire for this provider.
* Users who need HTTP hooks should use the google-gemini-cli provider instead.
*/
import {
type GenerateContentConfig,
type GenerateContentParameters,
3 changes: 2 additions & 1 deletion packages/ai/src/providers/openai-codex-responses.ts
@@ -141,7 +141,8 @@ export const streamOpenAICodexResponses: StreamFunction<"openai-codex-responses"
headers: redactHeaders(headers),
});

const response = await fetch(url, {
const fetchFn = options?.fetch ?? globalThis.fetch;
const response = await fetchFn(url, {
method: "POST",
headers,
body: JSON.stringify(transformedBody),
10 changes: 8 additions & 2 deletions packages/ai/src/providers/openai-completions.ts
@@ -100,7 +100,7 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = (

try {
const apiKey = options?.apiKey || getEnvApiKey(model.provider) || "";
const client = createClient(model, context, apiKey);
const client = createClient(model, context, apiKey, options?.fetch);
const params = buildParams(model, context, options);
const openaiStream = await client.chat.completions.create(params, { signal: options?.signal });
stream.push({ type: "start", partial: output });
@@ -315,7 +315,12 @@ export const streamOpenAICompletions: StreamFunction<"openai-completions"> = (
return stream;
};

function createClient(model: Model<"openai-completions">, context: Context, apiKey?: string) {
function createClient(
model: Model<"openai-completions">,
context: Context,
apiKey?: string,
customFetch?: typeof globalThis.fetch,
) {
if (!apiKey) {
if (!process.env.OPENAI_API_KEY) {
throw new Error(
@@ -356,6 +361,7 @@ function createClient(model: Model<"openai-completions">, context: Context, apiK
baseURL: model.baseUrl,
dangerouslyAllowBrowser: true,
defaultHeaders: headers,
fetch: customFetch,
});
}

10 changes: 8 additions & 2 deletions packages/ai/src/providers/openai-responses.ts
@@ -83,7 +83,7 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = (
try {
// Create OpenAI client
const apiKey = options?.apiKey || getEnvApiKey(model.provider) || "";
const client = createClient(model, context, apiKey);
const client = createClient(model, context, apiKey, options?.fetch);
const params = buildParams(model, context, options);
const openaiStream = await client.responses.create(params, { signal: options?.signal });
stream.push({ type: "start", partial: output });
@@ -312,7 +312,12 @@ export const streamOpenAIResponses: StreamFunction<"openai-responses"> = (
return stream;
};

function createClient(model: Model<"openai-responses">, context: Context, apiKey?: string) {
function createClient(
model: Model<"openai-responses">,
context: Context,
apiKey?: string,
customFetch?: typeof globalThis.fetch,
) {
if (!apiKey) {
if (!process.env.OPENAI_API_KEY) {
throw new Error(
@@ -353,6 +358,7 @@ function createClient(model: Model<"openai-responses">, context: Context, apiKey
baseURL: model.baseUrl,
dangerouslyAllowBrowser: true,
defaultHeaders: headers,
fetch: customFetch,
});
}

1 change: 1 addition & 0 deletions packages/ai/src/stream.ts
@@ -178,6 +178,7 @@ function mapOptionsForApi<TApi extends Api>(
signal: options?.signal,
apiKey: apiKey || options?.apiKey,
sessionId: options?.sessionId,
fetch: options?.fetch,
};

// Helper to clamp xhigh to high for providers that don't support it
6 changes: 6 additions & 0 deletions packages/ai/src/types.ts
@@ -70,6 +70,12 @@ export interface StreamOptions {
* session-aware features. Ignored by providers that don't support it.
*/
sessionId?: string;
/**
* Custom fetch function for HTTP hooks.
* Note: The google-generative-ai provider does not support custom fetch due to SDK limitations.
* Use google-gemini-cli instead if you need HTTP hooks for Google models.
*/
fetch?: typeof globalThis.fetch;
}

// Unified options with reasoning passed to streamSimple() and completeSimple()
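
A sketch of the same option at the `streamSimple` call site, assuming `model` and `context` are already in scope (the import path is an assumption):

```ts
import { streamSimple } from "@mariozechner/pi-ai"; // import path assumed

const stream = streamSimple(model, context, {
  apiKey: process.env.ANTHROPIC_API_KEY,
  // Per-call fetch override; ignored by google-generative-ai (see the note above).
  fetch: async (input, init) => {
    console.log("LLM request:", typeof input === "string" ? input : input instanceof URL ? input.href : input.url);
    return globalThis.fetch(input, init);
  },
});
```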
7 changes: 7 additions & 0 deletions packages/coding-agent/CHANGELOG.md
@@ -648,6 +648,13 @@ Total color count increased from 46 to 50. See [docs/theme.md](docs/theme.md) fo
- **API key priority**: `ANTHROPIC_OAUTH_TOKEN` now takes precedence over `ANTHROPIC_API_KEY`
- HTML export template split into separate files (template.html, template.css, template.js) for easier maintenance

### Added

- **`http_request` hook event**: Fired before HTTP requests to LLM providers. Hooks can add custom headers or cancel requests. API keys are automatically redacted from headers.
- **`http_response` hook event**: Fired after HTTP responses from LLM providers. Includes status, headers, and request duration for logging and monitoring.

**Note:** The `google-generative-ai` provider does not support HTTP hooks due to SDK limitations. Use `google-gemini-cli` instead for Google models if you need HTTP hooks.
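
A hedged sketch of a consuming hook, assuming an `extension.on(event, handler)` registration API (the registration surface is not shown here); the event payloads and the `HttpRequestEventResult` return shape mirror what the fetch wrapper emits:

```ts
// Hypothetical extension module; the `extension.on(...)` API is assumed.
extension.on("http_request", (event) => {
  console.log(`→ ${event.method} ${event.url} (${event.provider}/${event.modelId})`);
  // Returned headers are merged into the outgoing request;
  // returning { cancel: true } would abort it instead.
  return { headers: { "x-trace-id": crypto.randomUUID() } };
});

extension.on("http_response", (event) => {
  console.log(`← ${event.status} in ${event.durationMs}ms`);
});
```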

### Fixed

- HTML export now properly sanitizes user messages containing HTML tags like `<style>` that could break DOM rendering
150 changes: 150 additions & 0 deletions packages/coding-agent/src/core/extensions/http.ts
@@ -0,0 +1,150 @@
import type { Model } from "@mariozechner/pi-ai";
import type { ExtensionRunner } from "./runner.js";

/**
* Headers that should have their values redacted in http_request events.
* These headers contain sensitive authentication information.
*/
const REDACTED_HEADERS = [
"authorization",
"x-api-key",
"api-key",
"x-goog-api-key",
"anthropic-api-key",
"proxy-authorization",
"cookie",
"set-cookie",
];

/**
* Patterns to detect sensitive headers by name.
* If a header name (lowercase) contains any of these, it should be redacted.
*/
const SENSITIVE_HEADER_PATTERNS = ["auth", "token", "key", "secret", "cookie"];

/**
* Redact sensitive header values from a headers object.
*/
export function redactHeaders(headers: Record<string, string>): Record<string, string> {
const result: Record<string, string> = {};
for (const [key, value] of Object.entries(headers)) {
const lowerKey = key.toLowerCase();
const shouldRedact =
REDACTED_HEADERS.includes(lowerKey) || SENSITIVE_HEADER_PATTERNS.some((pattern) => lowerKey.includes(pattern));
result[key] = shouldRedact ? "[REDACTED]" : value;
}
return result;
}

/**
* Convert Headers object to plain Record.
*/
export function headersToRecord(headers: Headers): Record<string, string> {
const result: Record<string, string> = {};
headers.forEach((value, key) => {
result[key] = value;
});
return result;
}

/**
* Create a fetch factory function for extension HTTP events.
*
* Returns undefined if no http_request or http_response handlers are registered,
* allowing the Agent to use the default fetch.
*
* When handlers exist, returns a factory that creates a fetch wrapper which:
* - Emits http_request events before requests (with redacted auth headers)
* - Allows extensions to add custom headers or cancel requests
* - Emits http_response events after responses with timing
*
* Note: Not all providers support a custom fetch (depends on underlying SDK usage).
*/
export function createExtensionFetchFactory(
runner: ExtensionRunner | undefined,
): ((model: Model<any>) => typeof globalThis.fetch) | undefined {
if (!runner) return undefined;

const hasHttpRequestHandlers = runner.hasHandlers("http_request");
const hasHttpResponseHandlers = runner.hasHandlers("http_response");

if (!hasHttpRequestHandlers && !hasHttpResponseHandlers) {
return undefined;
}

return (model: Model<any>): typeof globalThis.fetch => {
return async (input: string | URL | Request, init?: RequestInit): Promise<Response> => {
const url = typeof input === "string" ? input : input instanceof URL ? input.toString() : input.url;
const method =
init?.method || (typeof input !== "string" && !(input instanceof URL) ? input.method : undefined) || "GET";

const normalizeHeaderValue = (value: unknown): string => {
if (Array.isArray(value)) {
return value.map((v) => String(v)).join(", ");
}
if (value === undefined || value === null) return "";
return String(value);
};

// Build headers record from input Request + init
let headers: Record<string, string> = {};

if (typeof input !== "string" && !(input instanceof URL)) {
headers = headersToRecord(input.headers);
}

if (init?.headers) {
if (init.headers instanceof Headers) {
headers = { ...headers, ...headersToRecord(init.headers) };
} else if (Array.isArray(init.headers)) {
for (const [key, value] of init.headers) {
headers[key] = normalizeHeaderValue(value);
}
} else {
for (const [key, value] of Object.entries(init.headers)) {
headers[key] = normalizeHeaderValue(value);
}
}
}

// Emit http_request event with redacted headers
let extraHeaders: Record<string, string> | undefined;
if (hasHttpRequestHandlers) {
const result = await runner.emitHttpRequest({
provider: model.provider,
modelId: model.id,
url,
method,
headers: redactHeaders(headers),
body: typeof init?.body === "string" ? init.body : undefined,
});

if (result?.cancel) {
throw new Error("HTTP request cancelled by extension");
}
extraHeaders = result?.headers;
}

const finalInit: RequestInit = { ...init };
if (extraHeaders) {
finalInit.headers = { ...headers, ...extraHeaders };
}

const startTime = Date.now();
const response = await globalThis.fetch(input, finalInit);
const durationMs = Date.now() - startTime;

if (hasHttpResponseHandlers) {
await runner.emitHttpResponse({
provider: model.provider,
modelId: model.id,
status: response.status,
headers: headersToRecord(response.headers),
durationMs,
});
}

return response;
};
};
}
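
Wiring the factory into the agent is then a one-liner — a sketch assuming a `runner` and the remaining `AgentOptions` are provided elsewhere:

```ts
import { createExtensionFetchFactory } from "./core/extensions/http.js";

// Returns undefined when no http_request/http_response handlers are registered,
// so the Agent falls back to its default fetch in that case.
const agent = new Agent({
  // ...other AgentOptions elided
  createFetch: createExtensionFetchFactory(runner),
});
```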
3 changes: 3 additions & 0 deletions packages/coding-agent/src/core/extensions/index.ts
@@ -40,6 +40,9 @@ export type {
GetActiveToolsHandler,
GetAllToolsHandler,
GrepToolResultEvent,
HttpRequestEvent,
HttpRequestEventResult,
HttpResponseEvent,
LoadExtensionsResult,
// Loaded Extension
LoadedExtension,