2 changes: 2 additions & 0 deletions packages/inference/README.md
@@ -60,6 +60,7 @@ Currently, we support the following providers:
- [Replicate](https://replicate.com)
- [Sambanova](https://sambanova.ai)
- [Scaleway](https://www.scaleway.com/en/generative-apis/)
- [Clarifai](http://clarifai.com)
- [Together](https://together.xyz)
- [Baseten](https://baseten.co)
- [Blackforestlabs](https://blackforestlabs.ai)
@@ -97,6 +98,7 @@ Only a subset of models are supported when requesting third-party providers. You
- [Replicate supported models](https://huggingface.co/api/partners/replicate/models)
- [Sambanova supported models](https://huggingface.co/api/partners/sambanova/models)
- [Scaleway supported models](https://huggingface.co/api/partners/scaleway/models)
- [Clarifai supported models](https://huggingface.co/api/partners/clarifai/models)
- [Together supported models](https://huggingface.co/api/partners/together/models)
- [Baseten supported models](https://huggingface.co/api/partners/baseten/models)
- [Cohere supported models](https://huggingface.co/api/partners/cohere/models)
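For reference, each of these "supported models" endpoints serves the provider's registered model mapping; an illustrative way to inspect the Clarifai list (not part of this diff, and it assumes the endpoint returns JSON like the other partner endpoints):

```ts
// Illustrative sketch (not part of this PR): inspect the public Clarifai model mapping.
// Assumes the endpoint returns JSON, as the other partner endpoints do.
const res = await fetch("https://huggingface.co/api/partners/clarifai/models");
const mapping = await res.json();
console.log(mapping);
```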
4 changes: 4 additions & 0 deletions packages/inference/src/lib/getProviderHelper.ts
@@ -1,4 +1,5 @@
import * as Baseten from "../providers/baseten.js";
import * as Clarifai from "../providers/clarifai.js";
import * as BlackForestLabs from "../providers/black-forest-labs.js";
import * as Cerebras from "../providers/cerebras.js";
import * as Cohere from "../providers/cohere.js";
@@ -65,6 +66,9 @@ export const PROVIDERS: Record<InferenceProvider, Partial<Record<InferenceTask,
	cerebras: {
		conversational: new Cerebras.CerebrasConversationalTask(),
	},
	clarifai: {
		conversational: new Clarifai.ClarifaiConversationalTask(),
	},
	cohere: {
		conversational: new Cohere.CohereConversationalTask(),
	},
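For orientation, the `PROVIDERS` record is keyed first by provider name and then by task, so the new Clarifai helper can be looked up like this (illustrative sketch based on the structure shown above, not code from this PR):

```ts
// Illustrative lookup based on the PROVIDERS shape shown above (not part of this PR).
const helper = PROVIDERS["clarifai"]?.["conversational"];
if (!helper) {
	throw new Error("clarifai does not support the conversational task");
}
```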
25 changes: 25 additions & 0 deletions packages/inference/src/providers/clarifai.ts
@@ -0,0 +1,25 @@
/**
 * See the registered mapping of HF model ID => Clarifai model ID here:
 *
 * https://huggingface.co/api/partners/clarifai/models
 *
 * This is a publicly available mapping.
 *
 * If you want to try to run inference for a new model locally before it's registered on huggingface.co,
 * you can add it to the dictionary "HARDCODED_MODEL_INFERENCE_MAPPING" in consts.ts, for dev purposes.
 *
 * - If you work at Clarifai and want to update this mapping, please use the model mapping API we provide on huggingface.co
 * - If you're a community member and want to add a new supported HF model to Clarifai, please open an issue on the present repo
 *   and we will tag Clarifai team members.
 *
 * Thanks!
 */
import { BaseConversationalTask } from "./providerHelper.js";

const CLARIFAI_API_BASE_URL = "https://api.clarifai.com/v2/ext/openai";

export class ClarifaiConversationalTask extends BaseConversationalTask {
	constructor() {
		super("clarifai", CLARIFAI_API_BASE_URL);
	}
}
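Once the provider is registered, requests go through the regular `InferenceClient` chat-completion path; a minimal usage sketch mirroring the test added later in this PR (the `CLARIFAI_API_KEY` environment variable name is an assumption for illustration):

```ts
import { InferenceClient } from "@huggingface/inference";

// Minimal sketch mirroring the test in InferenceClient.spec.ts below.
// CLARIFAI_API_KEY is an assumed env var name for illustration only.
const client = new InferenceClient(process.env.CLARIFAI_API_KEY);

const res = await client.chatCompletion({
	model: "Qwen/Qwen3-235B-A22B-Instruct-2507", // registered in the Clarifai mapping
	provider: "clarifai",
	messages: [{ role: "user", content: "What is 5 + 3?" }],
	max_tokens: 20,
});

console.log(res.choices[0]?.message?.content);
```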
1 change: 1 addition & 0 deletions packages/inference/src/providers/consts.ts
@@ -21,6 +21,7 @@ export const HARDCODED_MODEL_INFERENCE_MAPPING: Record<
	baseten: {},
	"black-forest-labs": {},
	cerebras: {},
	clarifai: {},
	cohere: {},
	"fal-ai": {},
	"featherless-ai": {},
1 change: 1 addition & 0 deletions packages/inference/src/types.ts
@@ -48,6 +48,7 @@ export const INFERENCE_PROVIDERS = [
"baseten",
"black-forest-labs",
"cerebras",
"clarifai",
"cohere",
"fal-ai",
"featherless-ai",
58 changes: 58 additions & 0 deletions packages/inference/test/InferenceClient.spec.ts
@@ -2401,4 +2401,62 @@ describe.skip("InferenceClient", () => {
		},
		TIMEOUT
	);

	describe.concurrent(
		"clarifai",
		() => {
			const client = new InferenceClient(env.HF_CLARIFAI_KEY ?? "dummy");

			HARDCODED_MODEL_INFERENCE_MAPPING["clarifai"] = {
				"Qwen/Qwen3-235B-A22B-Instruct-2507": {
					provider: "clarifai",
					hfModelId: "Qwen/Qwen3-235B-A22B-Instruct-2507",
					providerId: "Qwen/Qwen3-235B-A22B-Instruct-2507",
					status: "live",
					task: "conversational",
				},
			};

			it("chatCompletion - Qwen3 235B Instruct", async () => {
				const res = await client.chatCompletion({
					model: "Qwen/Qwen3-235B-A22B-Instruct-2507",
					provider: "clarifai",
					messages: [{ role: "user", content: "What is 5 + 3?" }],
					max_tokens: 20,
				});
				if (res.choices && res.choices.length > 0) {
					const completion = res.choices[0].message?.content;
					expect(completion).toBeDefined();
					expect(typeof completion).toBe("string");
					expect(completion).toMatch(/(eight|8)/i);
				}
			});

			it("chatCompletion stream - Qwen3 235B", async () => {
				const stream = client.chatCompletionStream({
					model: "Qwen/Qwen3-235B-A22B-Instruct-2507",
					provider: "clarifai",
					messages: [{ role: "user", content: "Count from 1 to 3" }],
					stream: true,
					max_tokens: 20,
				}) as AsyncGenerator<ChatCompletionStreamOutput>;

				let fullResponse = "";
				for await (const chunk of stream) {
					if (chunk.choices && chunk.choices.length > 0) {
						const content = chunk.choices[0].delta?.content;
						if (content) {
							fullResponse += content;
						}
					}
				}

				// Verify we got a meaningful response
				expect(fullResponse).toBeTruthy();
				expect(fullResponse.length).toBeGreaterThan(0);
				expect(fullResponse).toMatch(/1.*2.*3/);
			});
		},
		TIMEOUT
	);
});