Skip to content

Commit

Permalink
Add ollama (#687)
Browse files Browse the repository at this point in the history
Very popular local LLM inference server.

---------

Co-authored-by: Mishig Davaadorj <[email protected]>
  • Loading branch information
boxabirds and mishig25 authored Oct 16, 2024
1 parent 01ad4c1 commit 11b59f5
Show file tree
Hide file tree
Showing 3 changed files with 26 additions and 0 deletions.
3 changes: 3 additions & 0 deletions packages/tasks/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -51,5 +51,8 @@
"@types/node": "^20.11.5",
"quicktype-core": "https://github.com/huggingface/quicktype/raw/pack-18.0.17/packages/quicktype-core/quicktype-core-18.0.17.tgz",
"type-fest": "^3.13.1"
},
"dependencies": {
"@huggingface/gguf": "workspace:^"
}
}
5 changes: 5 additions & 0 deletions packages/tasks/pnpm-lock.yaml

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

18 changes: 18 additions & 0 deletions packages/tasks/src/local-apps.ts
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import type { ModelData } from "./model-data";
import type { PipelineType } from "./pipelines";
import { parseGGUFQuantLabel } from "@huggingface/gguf";

export interface LocalAppSnippet {
/**
Expand Down Expand Up @@ -53,6 +54,7 @@ export type LocalApp = {
/**
* And if not (mostly llama.cpp), snippet to copy/paste in your terminal
* Support the placeholder {{GGUF_FILE}} that will be replaced by the gguf file path or the list of available files.
 * Support the placeholder {{OLLAMA_TAG}} that will be replaced by the list of available quant tags, or removed if the repo does not contain multiple quant files.
*/
snippet: (model: ModelData, filepath?: string) => string | string[] | LocalAppSnippet | LocalAppSnippet[];
}
Expand Down Expand Up @@ -143,6 +145,15 @@ const snippetNodeLlamaCppCli = (model: ModelData, filepath?: string): LocalAppSn
];
};

/**
 * Build the `ollama run` command for a model hosted on the Hugging Face Hub.
 *
 * @param model - Hub model metadata; only `model.id` is read here.
 * @param filepath - Optional path to a specific GGUF file within the repo. When given,
 *   its quant label (e.g. "Q4_K_M") is appended as an Ollama tag; when absent, the
 *   {{OLLAMA_TAG}} placeholder is emitted so the caller can substitute (or strip) it.
 * @returns A single-line shell command string.
 */
const snippetOllama = (model: ModelData, filepath?: string): string => {
	if (!filepath) {
		// No file selected yet — leave the placeholder for later substitution.
		return `ollama run hf.co/${model.id}{{OLLAMA_TAG}}`;
	}
	// parseGGUFQuantLabel may not find a label; fall back to no tag suffix.
	const quant = parseGGUFQuantLabel(filepath);
	const tagSuffix = quant ? `:${quant}` : "";
	return `ollama run hf.co/${model.id}${tagSuffix}`;
};

const snippetLocalAI = (model: ModelData, filepath?: string): LocalAppSnippet[] => {
const command = (binary: string) =>
["# Load and run the model:", `${binary} huggingface://${model.id}/${filepath ?? "{{GGUF_FILE}}"}`].join("\n");
Expand Down Expand Up @@ -389,6 +400,13 @@ export const LOCAL_APPS = {
displayOnModelPage: (model) => model.library_name === "diffusers" && model.pipeline_tag === "text-to-image",
deeplink: (model) => new URL(`https://models.invoke.ai/huggingface/${model.id}`),
},
ollama: {
prettyLabel: "Ollama",
docsUrl: "https://ollama.com",
mainTask: "text-generation",
displayOnModelPage: isLlamaCppGgufModel,
snippet: snippetOllama,
},
} satisfies Record<string, LocalApp>;

export type LocalAppKey = keyof typeof LOCAL_APPS;

0 comments on commit 11b59f5

Please sign in to comment.