diff --git a/packages/tasks/package.json b/packages/tasks/package.json
index 82a7d1496..46d1c9636 100644
--- a/packages/tasks/package.json
+++ b/packages/tasks/package.json
@@ -51,5 +51,8 @@
 		"@types/node": "^20.11.5",
 		"quicktype-core": "https://github.com/huggingface/quicktype/raw/pack-18.0.17/packages/quicktype-core/quicktype-core-18.0.17.tgz",
 		"type-fest": "^3.13.1"
+	},
+	"dependencies": {
+		"@huggingface/gguf": "workspace:^"
 	}
 }
diff --git a/packages/tasks/pnpm-lock.yaml b/packages/tasks/pnpm-lock.yaml
index 741ee9a42..2d5435a89 100644
--- a/packages/tasks/pnpm-lock.yaml
+++ b/packages/tasks/pnpm-lock.yaml
@@ -4,6 +4,11 @@ settings:
   autoInstallPeers: true
   excludeLinksFromLockfile: false
 
+dependencies:
+  '@huggingface/gguf':
+    specifier: workspace:^
+    version: link:../gguf
+
 devDependencies:
   '@types/node':
     specifier: ^20.11.5
diff --git a/packages/tasks/src/local-apps.ts b/packages/tasks/src/local-apps.ts
index 54978f7c7..3fe1cbfac 100644
--- a/packages/tasks/src/local-apps.ts
+++ b/packages/tasks/src/local-apps.ts
@@ -1,5 +1,6 @@
 import type { ModelData } from "./model-data";
 import type { PipelineType } from "./pipelines";
+import { parseGGUFQuantLabel } from "@huggingface/gguf";
 
 export interface LocalAppSnippet {
 	/**
@@ -53,6 +54,7 @@ export type LocalApp = {
 			/**
 			 * And if not (mostly llama.cpp), snippet to copy/paste in your terminal
 			 * Support the placeholder {{GGUF_FILE}} that will be replaced by the gguf file path or the list of available files.
+			 * Support the placeholder {{OLLAMA_TAG}} that will be replaced by the list of available quant tags, or removed if the repo does not contain multiple quant files.
 			 */
 			snippet: (model: ModelData, filepath?: string) => string | string[] | LocalAppSnippet | LocalAppSnippet[];
 	  }
@@ -143,6 +145,15 @@ const snippetNodeLlamaCppCli = (model: ModelData, filepath?: string): LocalAppSn
 	];
 };
 
+const snippetOllama = (model: ModelData, filepath?: string): string => {
+	if (filepath) {
+		const quantLabel = parseGGUFQuantLabel(filepath);
+		const ollamaTag = quantLabel ? `:${quantLabel}` : "";
+		return `ollama run hf.co/${model.id}${ollamaTag}`;
+	}
+	return `ollama run hf.co/${model.id}{{OLLAMA_TAG}}`;
+};
+
 const snippetLocalAI = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
 	const command = (binary: string) =>
 		["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
@@ -389,6 +400,13 @@
 		displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
 		deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`),
 	},
+	ollama: {
+		prettyLabel: "Ollama",
+		docsUrl: "https://ollama.com",
+		mainTask: "text-generation",
+		displayOnModelPage: isLlamaCppGgufModel,
+		snippet: snippetOllama,
+	},
 } satisfies Record<string, LocalApp>;
 
 export type LocalAppKey = keyof typeof LOCAL_APPS;
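
For context, a minimal sketch of the commands this new `snippetOllama` helper produces, assuming `parseGGUFQuantLabel` extracts the quant label (e.g. `"Q4_K_M"`) from a GGUF filename; the model id and filename below are hypothetical, for illustration only:

```ts
import { parseGGUFQuantLabel } from "@huggingface/gguf";

// Hypothetical repo and quant file.
const modelId = "example-org/Example-8B-GGUF";
const filepath = "example-8b.Q4_K_M.gguf";

// With a concrete file, the quant label becomes the Ollama tag:
const quantLabel = parseGGUFQuantLabel(filepath); // assumed to yield "Q4_K_M"
console.log(`ollama run hf.co/${modelId}${quantLabel ? `:${quantLabel}` : ""}`);
// -> ollama run hf.co/example-org/Example-8B-GGUF:Q4_K_M

// Without a file, the {{OLLAMA_TAG}} placeholder is left for the caller to resolve:
console.log(`ollama run hf.co/${modelId}{{OLLAMA_TAG}}`);
// -> ollama run hf.co/example-org/Example-8B-GGUF{{OLLAMA_TAG}}
```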