diff --git a/packages/inference/src/lib/makeRequestOptions.ts b/packages/inference/src/lib/makeRequestOptions.ts
index 93cd5ce8f..a9c9f332c 100644
--- a/packages/inference/src/lib/makeRequestOptions.ts
+++ b/packages/inference/src/lib/makeRequestOptions.ts
@@ -154,7 +154,7 @@ function mapModel(params: { model: string; provider: InferenceProvider; taskHint
 	})();
 
 	if (!model) {
-		throw new Error(`Model ${params.model} is not supported for provider ${params.provider}`);
+		throw new Error(`Model ${params.model} is not supported for task ${task} and provider ${params.provider}`);
 	}
 	return model;
 }
diff --git a/packages/inference/test/HfInference.spec.ts b/packages/inference/test/HfInference.spec.ts
index 545db71ea..cb2270685 100644
--- a/packages/inference/test/HfInference.spec.ts
+++ b/packages/inference/test/HfInference.spec.ts
@@ -2,7 +2,7 @@ import { expect, it, describe, assert } from "vitest";
 
 import type { ChatCompletionStreamOutput } from "@huggingface/tasks";
 
-import { HfInference } from "../src";
+import { chatCompletion, HfInference } from "../src";
 import "./vcr";
 import { readTestFile } from "./test-files";
 
@@ -911,4 +911,17 @@ describe.concurrent("HfInference", () => {
 		},
 		TIMEOUT
 	);
+
+	describe.concurrent("3rd party providers", () => {
+		it("chatCompletion - fails with unsupported model", async () => {
+			await expect(
+				chatCompletion({
+					model: "black-forest-labs/Flux.1-dev",
+					provider: "together",
+					messages: [{ role: "user", content: "Complete this sentence with words, one plus one is equal " }],
+					accessToken: env.HF_TOGETHER_KEY,
+				})
+			).rejects.toThrowError("Model black-forest-labs/Flux.1-dev is not supported for task conversational and provider together");
+		});
+	});
 });