From 1821ea7279a3cc5daacb41c476abc1b19ce091e4 Mon Sep 17 00:00:00 2001
From: DaniAkash
Date: Thu, 2 Oct 2025 11:24:35 +0530
Subject: [PATCH 1/7] feat: setup clarifai conversational support

---
 packages/inference/README.md                   |  2 +
 .../inference/src/lib/getProviderHelper.ts     |  4 ++
 packages/inference/src/providers/clarifai.ts   | 25 ++++++++
 packages/inference/src/providers/consts.ts     |  1 +
 packages/inference/src/types.ts                |  1 +
 .../inference/test/InferenceClient.spec.ts     | 58 +++++++++++++++++++
 6 files changed, 91 insertions(+)
 create mode 100644 packages/inference/src/providers/clarifai.ts

diff --git a/packages/inference/README.md b/packages/inference/README.md
index b2910ed186..db32de6e4e 100644
--- a/packages/inference/README.md
+++ b/packages/inference/README.md
@@ -60,6 +60,7 @@ Currently, we support the following providers:
 - [Replicate](https://replicate.com)
 - [Sambanova](https://sambanova.ai)
 - [Scaleway](https://www.scaleway.com/en/generative-apis/)
+- [Clarifai](http://clarifai.com)
 - [Together](https://together.xyz)
 - [Baseten](https://baseten.co)
 - [Blackforestlabs](https://blackforestlabs.ai)
@@ -97,6 +98,7 @@ Only a subset of models are supported when requesting third-party providers. You
 - [Replicate supported models](https://huggingface.co/api/partners/replicate/models)
 - [Sambanova supported models](https://huggingface.co/api/partners/sambanova/models)
 - [Scaleway supported models](https://huggingface.co/api/partners/scaleway/models)
+- [Clarifai Supported models](https://huggingface.co/api/partners/clarifai/models)
 - [Together supported models](https://huggingface.co/api/partners/together/models)
 - [Baseten supported models](https://huggingface.co/api/partners/baseten/models)
 - [Cohere supported models](https://huggingface.co/api/partners/cohere/models)

diff --git a/packages/inference/src/lib/getProviderHelper.ts b/packages/inference/src/lib/getProviderHelper.ts
index c41f929cab..ad95683065 100644
--- a/packages/inference/src/lib/getProviderHelper.ts
+++ b/packages/inference/src/lib/getProviderHelper.ts
@@ -1,4 +1,5 @@
 import * as Baseten from "../providers/baseten.js";
+import * as Clarifai from "../providers/clarifai.js";
 import * as BlackForestLabs from "../providers/black-forest-labs.js";
 import * as Cerebras from "../providers/cerebras.js";
 import * as Cohere from "../providers/cohere.js";
@@ -59,6 +60,9 @@ export const PROVIDERS: Record<
+    clarifai: {
+        conversational: new Clarifai.ClarifaiConversationalTask(),
+    },

diff --git a/packages/inference/src/providers/clarifai.ts b/packages/inference/src/providers/clarifai.ts
new file mode 100644
index 0000000000..b2aa064a54
--- /dev/null
+++ b/packages/inference/src/providers/clarifai.ts
@@ -0,0 +1,25 @@
+/**
+ * See the registered mapping of HF model ID => Clarifai model ID here:
+ *
+ * https://huggingface.co/api/partners/clarifai/models
+ *
+ * This is a publicly available mapping.
+ *
+ * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
+ * you can add it to the dictionary "HARDCODED_MODEL_ID_MAPPING" in consts.ts, for dev purposes.
+ *
+ * - If you work at Clarifai and want to update this mapping, please use the model mapping API we provide on huggingface.co
+ * - If you're a community member and want to add a new supported HF model to Clarifai, please open an issue on the present repo
+ *   and we will tag Clarifai team members.
+ *
+ * Thanks!
+ */
+import { BaseConversationalTask } from "./providerHelper.js";
+
+const CLARIFAI_API_BASE_URL = "https://api.clarifai.com/v2/ext/openai";
+
+export class ClarifaiConversationalTask extends BaseConversationalTask {
+    constructor() {
+        super("clarifai", CLARIFAI_API_BASE_URL);
+    }
+}

diff --git a/packages/inference/src/providers/consts.ts b/packages/inference/src/providers/consts.ts
index 1dfa656736..732856beae 100644
--- a/packages/inference/src/providers/consts.ts
+++ b/packages/inference/src/providers/consts.ts
@@ -37,6 +37,7 @@ export const HARDCODED_MODEL_INFERENCE_MAPPING: Record<
     replicate: {},
     sambanova: {},
     scaleway: {},
+    clarifai: {},
     together: {},
     "zai-org": {},
 };

diff --git a/packages/inference/src/types.ts b/packages/inference/src/types.ts
index b5d4815b5a..242b1f2b0e 100644
--- a/packages/inference/src/types.ts
+++ b/packages/inference/src/types.ts
@@ -64,6 +64,7 @@ export const INFERENCE_PROVIDERS = [
     "replicate",
     "sambanova",
     "scaleway",
+    "clarifai",
     "together",
     "zai-org",
 ] as const;

diff --git a/packages/inference/test/InferenceClient.spec.ts b/packages/inference/test/InferenceClient.spec.ts
index 4520c389a1..dd0ab58332 100644
--- a/packages/inference/test/InferenceClient.spec.ts
+++ b/packages/inference/test/InferenceClient.spec.ts
@@ -2401,4 +2401,62 @@ describe.skip("InferenceClient", () => {
         },
         TIMEOUT
     );
+
+    describe.concurrent(
+        "clarifai",
+        () => {
+            const client = new InferenceClient(env.HF_CLARIFAI_KEY ?? "dummy");
+
+            HARDCODED_MODEL_INFERENCE_MAPPING["clarifai"] = {
+                "Qwen/Qwen3-235B-A22B-Instruct-2507": {
+                    provider: "clarifai",
+                    hfModelId: "Qwen/Qwen3-235B-A22B-Instruct-2507",
+                    providerId: "Qwen/Qwen3-235B-A22B-Instruct-2507",
+                    status: "live",
+                    task: "conversational",
+                },
+            };
+
+            it("chatCompletion - Qwen3 235B Instruct", async () => {
+                const res = await client.chatCompletion({
+                    model: "Qwen/Qwen3-235B-A22B-Instruct-2507",
+                    provider: "clarifai",
+                    messages: [{ role: "user", content: "What is 5 + 3?" }],
+                    max_tokens: 20,
+                });
+                if (res.choices && res.choices.length > 0) {
+                    const completion = res.choices[0].message?.content;
+                    expect(completion).toBeDefined();
+                    expect(typeof completion).toBe("string");
+                    expect(completion).toMatch(/(eight|8)/i);
+                }
+            });
+
+            it("chatCompletion stream - Qwen3 235B", async () => {
+                const stream = client.chatCompletionStream({
+                    model: "Qwen/Qwen3-235B-A22B-Instruct-2507",
+                    provider: "clarifai",
+                    messages: [{ role: "user", content: "Count from 1 to 3" }],
+                    stream: true,
+                    max_tokens: 20,
+                }) as AsyncGenerator;
+
+                let fullResponse = "";
+                for await (const chunk of stream) {
+                    if (chunk.choices && chunk.choices.length > 0) {
+                        const content = chunk.choices[0].delta?.content;
+                        if (content) {
+                            fullResponse += content;
+                        }
+                    }
+                }
+
+                // Verify we got a meaningful response
+                expect(fullResponse).toBeTruthy();
+                expect(fullResponse.length).toBeGreaterThan(0);
+                expect(fullResponse).toMatch(/1.*2.*3/);
+            });
+        },
+        TIMEOUT
+    );
 });

From ae7b0e6a4bb946dc961556fc3cd8974d26f06fc9 Mon Sep 17 00:00:00 2001
From: DaniAkash
Date: Thu, 2 Oct 2025 18:06:07 +0530
Subject: [PATCH 2/7] chore: address review comments

---
 packages/inference/README.md                    | 2 +-
 packages/inference/src/lib/getProviderHelper.ts | 6 +++---
 packages/inference/src/providers/consts.ts      | 2 +-
 packages/inference/src/types.ts                 | 2 +-
 4 files changed, 6 insertions(+), 6 deletions(-)

diff --git a/packages/inference/README.md b/packages/inference/README.md
index db32de6e4e..d59caa67c1 100644
--- a/packages/inference/README.md
+++ b/packages/inference/README.md
@@ -98,7 +98,7 @@ Only a subset of models are supported when requesting third-party providers. You
 - [Replicate supported models](https://huggingface.co/api/partners/replicate/models)
 - [Sambanova supported models](https://huggingface.co/api/partners/sambanova/models)
 - [Scaleway supported models](https://huggingface.co/api/partners/scaleway/models)
-- [Clarifai Supported models](https://huggingface.co/api/partners/clarifai/models)
+- [Clarifai supported models](https://huggingface.co/api/partners/clarifai/models)
 - [Together supported models](https://huggingface.co/api/partners/together/models)
 - [Baseten supported models](https://huggingface.co/api/partners/baseten/models)
 - [Cohere supported models](https://huggingface.co/api/partners/cohere/models)

diff --git a/packages/inference/src/lib/getProviderHelper.ts b/packages/inference/src/lib/getProviderHelper.ts
index ad95683065..ee39f50342 100644
--- a/packages/inference/src/lib/getProviderHelper.ts
+++ b/packages/inference/src/lib/getProviderHelper.ts
@@ -60,15 +60,15 @@ export const PROVIDERS: Record<

From: DaniAkash
Date: Thu, 2 Oct 2025 18:18:28 +0530
Subject: [PATCH 3/7] feat: use makeRoute method to construct url

---
 packages/inference/src/providers/clarifai.ts | 6 +++++-
 1 file changed, 5 insertions(+), 1 deletion(-)

diff --git a/packages/inference/src/providers/clarifai.ts b/packages/inference/src/providers/clarifai.ts
index b2aa064a54..083105eb69 100644
--- a/packages/inference/src/providers/clarifai.ts
+++ b/packages/inference/src/providers/clarifai.ts
@@ -16,10 +16,14 @@
  */
 import { BaseConversationalTask } from "./providerHelper.js";
 
-const CLARIFAI_API_BASE_URL = "https://api.clarifai.com/v2/ext/openai";
+const CLARIFAI_API_BASE_URL = "https://api.clarifai.com";
 
 export class ClarifaiConversationalTask extends BaseConversationalTask {
     constructor() {
         super("clarifai", CLARIFAI_API_BASE_URL);
     }
+
+    override makeRoute(): string {
+        return "/v2/ext/openai/v1/chat/completions";
+    }
 }
From e47414346eea1f16e0be7ee8ac17bf76d8099ffd Mon Sep 17 00:00:00 2001
From: DaniAkash
Date: Thu, 2 Oct 2025 18:23:28 +0530
Subject: [PATCH 4/7] docs: reorder for consistency

---
 packages/inference/README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/inference/README.md b/packages/inference/README.md
index d59caa67c1..db3f64b35f 100644
--- a/packages/inference/README.md
+++ b/packages/inference/README.md
@@ -98,9 +98,9 @@ Only a subset of models are supported when requesting third-party providers. You
 - [Replicate supported models](https://huggingface.co/api/partners/replicate/models)
 - [Sambanova supported models](https://huggingface.co/api/partners/sambanova/models)
 - [Scaleway supported models](https://huggingface.co/api/partners/scaleway/models)
-- [Clarifai supported models](https://huggingface.co/api/partners/clarifai/models)
 - [Together supported models](https://huggingface.co/api/partners/together/models)
 - [Baseten supported models](https://huggingface.co/api/partners/baseten/models)
+- [Clarifai supported models](https://huggingface.co/api/partners/clarifai/models)
 - [Cohere supported models](https://huggingface.co/api/partners/cohere/models)
 - [Cerebras supported models](https://huggingface.co/api/partners/cerebras/models)
 - [Groq supported models](https://console.groq.com/docs/models)

From 6adf9660b5ba842a9036f23d04dadfe3e319a406 Mon Sep 17 00:00:00 2001
From: DaniAkash
Date: Fri, 3 Oct 2025 14:17:35 +0530
Subject: [PATCH 5/7] fix: tests with clarifai provider

---
 packages/inference/src/providers/clarifai.ts    | 12 ++++++++++++
 packages/inference/test/InferenceClient.spec.ts | 14 +++++++-------
 2 files changed, 19 insertions(+), 7 deletions(-)

diff --git a/packages/inference/src/providers/clarifai.ts b/packages/inference/src/providers/clarifai.ts
index 083105eb69..574a6847eb 100644
--- a/packages/inference/src/providers/clarifai.ts
+++ b/packages/inference/src/providers/clarifai.ts
@@ -15,6 +15,7 @@
  * Thanks!
  */
 import { BaseConversationalTask } from "./providerHelper.js";
+import type { HeaderParams } from "../types.js";
 
 const CLARIFAI_API_BASE_URL = "https://api.clarifai.com";
 
@@ -26,4 +27,15 @@ export class ClarifaiConversationalTask extends BaseConversationalTask {
     override makeRoute(): string {
         return "/v2/ext/openai/v1/chat/completions";
     }
+
+    override prepareHeaders(params: HeaderParams, isBinary: boolean): Record<string, string> {
+        const headers: Record<string, string> = {};
+        if (params.authMethod !== "none") {
+            headers["Authorization"] = `Key ${params.accessToken}`;
+        }
+        if (!isBinary) {
+            headers["Content-Type"] = "application/json";
+        }
+        return headers;
+    }
 }

diff --git a/packages/inference/test/InferenceClient.spec.ts b/packages/inference/test/InferenceClient.spec.ts
index dd0ab58332..8f530efc94 100644
--- a/packages/inference/test/InferenceClient.spec.ts
+++ b/packages/inference/test/InferenceClient.spec.ts
@@ -2408,18 +2408,18 @@ describe.skip("InferenceClient", () => {
             const client = new InferenceClient(env.HF_CLARIFAI_KEY ?? "dummy");
"dummy"); HARDCODED_MODEL_INFERENCE_MAPPING["clarifai"] = { - "Qwen/Qwen3-235B-A22B-Instruct-2507": { + "clarifai/DeepSeek-V3_1": { provider: "clarifai", - hfModelId: "Qwen/Qwen3-235B-A22B-Instruct-2507", - providerId: "Qwen/Qwen3-235B-A22B-Instruct-2507", + hfModelId: "clarifai/DeepSeek-V3_1", + providerId: "deepseek-ai/deepseek-chat/models/DeepSeek-V3_1", status: "live", task: "conversational", }, }; - it("chatCompletion - Qwen3 235B Instruct", async () => { + it("chatCompletion - DeepSeek-V3_1", async () => { const res = await client.chatCompletion({ - model: "Qwen/Qwen3-235B-A22B-Instruct-2507", + model: "clarifai/DeepSeek-V3_1", provider: "clarifai", messages: [{ role: "user", content: "What is 5 + 3?" }], max_tokens: 20, @@ -2432,9 +2432,9 @@ describe.skip("InferenceClient", () => { } }); - it("chatCompletion stream - Qwen3 235B", async () => { + it("chatCompletion stream - DeepSeek-V3_1", async () => { const stream = client.chatCompletionStream({ - model: "Qwen/Qwen3-235B-A22B-Instruct-2507", + model: "clarifai/DeepSeek-V3_1", provider: "clarifai", messages: [{ role: "user", content: "Count from 1 to 3" }], stream: true, From 87fe6c275d0ee4f90a90d71801bab36e3ba0e51f Mon Sep 17 00:00:00 2001 From: DaniAkash Date: Fri, 3 Oct 2025 14:24:39 +0530 Subject: [PATCH 6/7] fix: model name key --- packages/inference/test/InferenceClient.spec.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/inference/test/InferenceClient.spec.ts b/packages/inference/test/InferenceClient.spec.ts index 8f530efc94..3ea6b55241 100644 --- a/packages/inference/test/InferenceClient.spec.ts +++ b/packages/inference/test/InferenceClient.spec.ts @@ -2408,9 +2408,9 @@ describe.skip("InferenceClient", () => { const client = new InferenceClient(env.HF_CLARIFAI_KEY ?? "dummy"); HARDCODED_MODEL_INFERENCE_MAPPING["clarifai"] = { - "clarifai/DeepSeek-V3_1": { + "deepseek-ai/DeepSeek-V3.1": { provider: "clarifai", - hfModelId: "clarifai/DeepSeek-V3_1", + hfModelId: "deepseek-ai/DeepSeek-V3.1", providerId: "deepseek-ai/deepseek-chat/models/DeepSeek-V3_1", status: "live", task: "conversational", @@ -2419,7 +2419,7 @@ describe.skip("InferenceClient", () => { it("chatCompletion - DeepSeek-V3_1", async () => { const res = await client.chatCompletion({ - model: "clarifai/DeepSeek-V3_1", + model: "deepseek-ai/DeepSeek-V3.1", provider: "clarifai", messages: [{ role: "user", content: "What is 5 + 3?" 
                     max_tokens: 20,
@@ -2434,7 +2434,7 @@ describe.skip("InferenceClient", () => {
             it("chatCompletion stream - DeepSeek-V3_1", async () => {
                 const stream = client.chatCompletionStream({
-                    model: "clarifai/DeepSeek-V3_1",
+                    model: "deepseek-ai/DeepSeek-V3.1",
                     provider: "clarifai",
                     messages: [{ role: "user", content: "Count from 1 to 3" }],
                     stream: true,

From 9ebbb125bf3e0153825a0425e416c46904b123f5 Mon Sep 17 00:00:00 2001
From: DaniAkash
Date: Fri, 3 Oct 2025 18:16:33 +0530
Subject: [PATCH 7/7] feat: better check for setting auth headers

---
 packages/inference/src/providers/clarifai.ts | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/packages/inference/src/providers/clarifai.ts b/packages/inference/src/providers/clarifai.ts
index 574a6847eb..b0f3196d57 100644
--- a/packages/inference/src/providers/clarifai.ts
+++ b/packages/inference/src/providers/clarifai.ts
@@ -29,10 +29,10 @@ export class ClarifaiConversationalTask extends BaseConversationalTask {
     }
 
     override prepareHeaders(params: HeaderParams, isBinary: boolean): Record<string, string> {
-        const headers: Record<string, string> = {};
-        if (params.authMethod !== "none") {
-            headers["Authorization"] = `Key ${params.accessToken}`;
-        }
+        const headers: Record<string, string> = {
+            Authorization:
+                params.authMethod !== "provider-key" ? `Bearer ${params.accessToken}` : `Key ${params.accessToken}`,
+        };
         if (!isBinary) {
             headers["Content-Type"] = "application/json";
         }
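
A short usage sketch of what this series wires up, mirroring the tests above: the provider name "clarifai" and the chatCompletion call shape come straight from the spec changes in these patches, while the model ID "deepseek-ai/DeepSeek-V3.1" and the HF_TOKEN environment variable are assumptions carried over from the tests rather than anything confirmed by Clarifai documentation. Per PATCH 7/7, a routed Hugging Face token is sent as "Authorization: Bearer ...", and a direct Clarifai key (authMethod "provider-key") is sent as "Authorization: Key ...".

import { InferenceClient } from "@huggingface/inference";

async function main() {
    // Assumption: HF_TOKEN holds a Hugging Face access token (sent as "Bearer ...");
    // passing a Clarifai key directly would instead be sent as "Key ..." (see PATCH 7/7).
    const client = new InferenceClient(process.env.HF_TOKEN);

    const res = await client.chatCompletion({
        provider: "clarifai",
        model: "deepseek-ai/DeepSeek-V3.1", // assumed: the same model the tests register
        messages: [{ role: "user", content: "What is 5 + 3?" }],
        max_tokens: 20,
    });

    console.log(res.choices[0]?.message?.content);
}

main().catch(console.error);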