From 61e68f3b17853697c7eda234f79e54f1e7a1af82 Mon Sep 17 00:00:00 2001 From: Aarni Koskela Date: Tue, 14 Jan 2025 18:25:05 +0200 Subject: [PATCH] chore: use futuristic type annotations --- ollama/_client.py | 349 +++++++++++++++++++++++----------------------- ollama/_types.py | 234 ++++++++++++++++--------------- ollama/_utils.py | 4 +- poetry.lock | 18 ++- pyproject.toml | 1 + requirements.txt | 3 + 6 files changed, 314 insertions(+), 295 deletions(-) diff --git a/ollama/_client.py b/ollama/_client.py index cbe43c9..8e599e1 100644 --- a/ollama/_client.py +++ b/ollama/_client.py @@ -1,3 +1,5 @@ +from __future__ import annotations + import ipaddress import json import os @@ -10,15 +12,10 @@ from typing import ( Any, Callable, - Dict, - List, Literal, Mapping, - Optional, Sequence, - Type, TypeVar, - Union, overload, ) @@ -74,10 +71,10 @@ class BaseClient: def __init__( self, client, - host: Optional[str] = None, + host: str | None = None, follow_redirects: bool = True, timeout: Any = None, - headers: Optional[Mapping[str, str]] = None, + headers: Mapping[str, str] | None = None, **kwargs, ) -> None: """ @@ -110,7 +107,7 @@ def __init__( class Client(BaseClient): - def __init__(self, host: Optional[str] = None, **kwargs) -> None: + def __init__(self, host: str | None = None, **kwargs) -> None: super().__init__(httpx.Client, host, **kwargs) def _request_raw(self, *args, **kwargs): @@ -126,7 +123,7 @@ def _request_raw(self, *args, **kwargs): @overload def _request( self, - cls: Type[T], + cls: type[T], *args, stream: Literal[False] = False, **kwargs, @@ -135,7 +132,7 @@ def _request( @overload def _request( self, - cls: Type[T], + cls: type[T], *args, stream: Literal[True] = True, **kwargs, @@ -144,19 +141,19 @@ def _request( @overload def _request( self, - cls: Type[T], + cls: type[T], *args, stream: bool = False, **kwargs, - ) -> Union[T, Iterator[T]]: ... + ) -> T | Iterator[T]: ... def _request( self, - cls: Type[T], + cls: type[T], *args, stream: bool = False, **kwargs, - ) -> Union[T, Iterator[T]]: + ) -> T | Iterator[T]: if stream: def inner(): @@ -186,13 +183,13 @@ def generate( *, system: str = '', template: str = '', - context: Optional[Sequence[int]] = None, + context: Sequence[int] | None = None, stream: Literal[False] = False, raw: bool = False, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - images: Optional[Sequence[Union[str, bytes]]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + images: Sequence[str | bytes] | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> GenerateResponse: ... @overload @@ -204,31 +201,31 @@ def generate( *, system: str = '', template: str = '', - context: Optional[Sequence[int]] = None, + context: Sequence[int] | None = None, stream: Literal[True] = True, raw: bool = False, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - images: Optional[Sequence[Union[str, bytes]]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + images: Sequence[str | bytes] | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> Iterator[GenerateResponse]: ... 
def generate( self, model: str = '', - prompt: Optional[str] = None, - suffix: Optional[str] = None, + prompt: str | None = None, + suffix: str | None = None, *, - system: Optional[str] = None, - template: Optional[str] = None, - context: Optional[Sequence[int]] = None, + system: str | None = None, + template: str | None = None, + context: Sequence[int] | None = None, stream: bool = False, - raw: Optional[bool] = None, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - images: Optional[Sequence[Union[str, bytes]]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, - ) -> Union[GenerateResponse, Iterator[GenerateResponse]]: + raw: bool | None = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + images: Sequence[str | bytes] | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, + ) -> GenerateResponse | Iterator[GenerateResponse]: """ Create a response using the requested model. @@ -264,39 +261,39 @@ def generate( def chat( self, model: str = '', - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, - tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, + tools: Sequence[Mapping[str, Any] | Tool | Callable] | None = None, stream: Literal[False] = False, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> ChatResponse: ... @overload def chat( self, model: str = '', - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, - tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, + tools: Sequence[Mapping[str, Any] | Tool | Callable] | None = None, stream: Literal[True] = True, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> Iterator[ChatResponse]: ... def chat( self, model: str = '', - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, - tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, + tools: Sequence[Mapping[str, Any] | Tool | Callable] | None = None, stream: bool = False, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, - ) -> Union[ChatResponse, Iterator[ChatResponse]]: + format: Literal['', 'json'] | JsonSchemaValue | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, + ) -> ChatResponse | Iterator[ChatResponse]: """ Create a chat response using the requested model. 
@@ -349,10 +346,10 @@ def add_two_numbers(a: int, b: int) -> int: def embed( self, model: str = '', - input: Union[str, Sequence[str]] = '', - truncate: Optional[bool] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + input: str | Sequence[str] = '', + truncate: bool | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> EmbedResponse: return self._request( EmbedResponse, @@ -370,9 +367,9 @@ def embed( def embeddings( self, model: str = '', - prompt: Optional[str] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + prompt: str | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> EmbeddingsResponse: """ Deprecated in favor of `embed`. @@ -413,7 +410,7 @@ def pull( *, insecure: bool = False, stream: bool = False, - ) -> Union[ProgressResponse, Iterator[ProgressResponse]]: + ) -> ProgressResponse | Iterator[ProgressResponse]: """ Raises `ResponseError` if the request could not be fulfilled. @@ -455,7 +452,7 @@ def push( *, insecure: bool = False, stream: bool = False, - ) -> Union[ProgressResponse, Iterator[ProgressResponse]]: + ) -> ProgressResponse | Iterator[ProgressResponse]: """ Raises `ResponseError` if the request could not be fulfilled. @@ -477,15 +474,15 @@ def push( def create( self, model: str, - quantize: Optional[str] = None, - from_: Optional[str] = None, - files: Optional[Dict[str, str]] = None, - adapters: Optional[Dict[str, str]] = None, - template: Optional[str] = None, - license: Optional[Union[str, List[str]]] = None, - system: Optional[str] = None, - parameters: Optional[Union[Mapping[str, Any], Options]] = None, - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + quantize: str | None = None, + from_: str | None = None, + files: dict[str, str] | None = None, + adapters: dict[str, str] | None = None, + template: str | None = None, + license: str | list[str] | None = None, + system: str | None = None, + parameters: Mapping[str, Any] | Options | None = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, stream: Literal[False] = False, ) -> ProgressResponse: ... @@ -494,15 +491,15 @@ def create( def create( self, model: str, - quantize: Optional[str] = None, - from_: Optional[str] = None, - files: Optional[Dict[str, str]] = None, - adapters: Optional[Dict[str, str]] = None, - template: Optional[str] = None, - license: Optional[Union[str, List[str]]] = None, - system: Optional[str] = None, - parameters: Optional[Union[Mapping[str, Any], Options]] = None, - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + quantize: str | None = None, + from_: str | None = None, + files: dict[str, str] | None = None, + adapters: dict[str, str] | None = None, + template: str | None = None, + license: str | list[str] | None = None, + system: str | None = None, + parameters: Mapping[str, Any] | Options | None = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, stream: Literal[True] = True, ) -> Iterator[ProgressResponse]: ... 
@@ -510,18 +507,18 @@ def create( def create( self, model: str, - quantize: Optional[str] = None, - from_: Optional[str] = None, - files: Optional[Dict[str, str]] = None, - adapters: Optional[Dict[str, str]] = None, - template: Optional[str] = None, - license: Optional[Union[str, List[str]]] = None, - system: Optional[str] = None, - parameters: Optional[Union[Mapping[str, Any], Options]] = None, - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + quantize: str | None = None, + from_: str | None = None, + files: dict[str, str] | None = None, + adapters: dict[str, str] | None = None, + template: str | None = None, + license: str | list[str] | None = None, + system: str | None = None, + parameters: Mapping[str, Any] | Options | None = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, stream: bool = False, - ) -> Union[ProgressResponse, Iterator[ProgressResponse]]: + ) -> ProgressResponse | Iterator[ProgressResponse]: """ Raises `ResponseError` if the request could not be fulfilled. @@ -547,7 +544,7 @@ def create( stream=stream, ) - def create_blob(self, path: Union[str, Path]) -> str: + def create_blob(self, path: str | Path) -> str: sha256sum = sha256() with open(path, 'rb') as r: while True: @@ -614,7 +611,7 @@ def ps(self) -> ProcessResponse: class AsyncClient(BaseClient): - def __init__(self, host: Optional[str] = None, **kwargs) -> None: + def __init__(self, host: str | None = None, **kwargs) -> None: super().__init__(httpx.AsyncClient, host, **kwargs) async def _request_raw(self, *args, **kwargs): @@ -630,7 +627,7 @@ async def _request_raw(self, *args, **kwargs): @overload async def _request( self, - cls: Type[T], + cls: type[T], *args, stream: Literal[False] = False, **kwargs, @@ -639,7 +636,7 @@ async def _request( @overload async def _request( self, - cls: Type[T], + cls: type[T], *args, stream: Literal[True] = True, **kwargs, @@ -648,19 +645,19 @@ async def _request( @overload async def _request( self, - cls: Type[T], + cls: type[T], *args, stream: bool = False, **kwargs, - ) -> Union[T, AsyncIterator[T]]: ... + ) -> T | AsyncIterator[T]: ... async def _request( self, - cls: Type[T], + cls: type[T], *args, stream: bool = False, **kwargs, - ) -> Union[T, AsyncIterator[T]]: + ) -> T | AsyncIterator[T]: if stream: async def inner(): @@ -690,13 +687,13 @@ async def generate( *, system: str = '', template: str = '', - context: Optional[Sequence[int]] = None, + context: Sequence[int] | None = None, stream: Literal[False] = False, raw: bool = False, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - images: Optional[Sequence[Union[str, bytes]]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + images: Sequence[str | bytes] | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> GenerateResponse: ... 
@overload @@ -708,31 +705,31 @@ async def generate( *, system: str = '', template: str = '', - context: Optional[Sequence[int]] = None, + context: Sequence[int] | None = None, stream: Literal[True] = True, raw: bool = False, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - images: Optional[Sequence[Union[str, bytes]]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + images: Sequence[str | bytes] | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> AsyncIterator[GenerateResponse]: ... async def generate( self, model: str = '', - prompt: Optional[str] = None, - suffix: Optional[str] = None, + prompt: str | None = None, + suffix: str | None = None, *, - system: Optional[str] = None, - template: Optional[str] = None, - context: Optional[Sequence[int]] = None, + system: str | None = None, + template: str | None = None, + context: Sequence[int] | None = None, stream: bool = False, - raw: Optional[bool] = None, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - images: Optional[Sequence[Union[str, bytes]]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, - ) -> Union[GenerateResponse, AsyncIterator[GenerateResponse]]: + raw: bool | None = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + images: Sequence[str | bytes] | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, + ) -> GenerateResponse | AsyncIterator[GenerateResponse]: """ Create a response using the requested model. @@ -767,39 +764,39 @@ async def generate( async def chat( self, model: str = '', - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, - tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, + tools: Sequence[Mapping[str, Any] | Tool | Callable] | None = None, stream: Literal[False] = False, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> ChatResponse: ... @overload async def chat( self, model: str = '', - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, - tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None, + tools: Sequence[Mapping[str, Any] | Tool | Callable] | None = None, stream: Literal[True] = True, - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None, - options: Optional[Union[Mapping[str, Any], Options]] = None, - keep_alive: Optional[Union[float, str]] = None, + format: Literal['', 'json'] | JsonSchemaValue | None = None, + options: Mapping[str, Any] | Options | None = None, + keep_alive: float | str | None = None, ) -> AsyncIterator[ChatResponse]: ... 
   async def chat(
     self,
     model: str = '',
-    messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None,
+    messages: Sequence[Mapping[str, Any] | Message] | None = None,
     *,
-    tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None,
+    tools: Sequence[Mapping[str, Any] | Tool | Callable] | None = None,
     stream: bool = False,
-    format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None,
-    options: Optional[Union[Mapping[str, Any], Options]] = None,
-    keep_alive: Optional[Union[float, str]] = None,
-  ) -> Union[ChatResponse, AsyncIterator[ChatResponse]]:
+    format: Literal['', 'json'] | JsonSchemaValue | None = None,
+    options: Mapping[str, Any] | Options | None = None,
+    keep_alive: float | str | None = None,
+  ) -> ChatResponse | AsyncIterator[ChatResponse]:
     """
     Create a chat response using the requested model.
@@ -853,10 +850,10 @@ def add_two_numbers(a: int, b: int) -> int:
   async def embed(
     self,
     model: str = '',
-    input: Union[str, Sequence[str]] = '',
-    truncate: Optional[bool] = None,
-    options: Optional[Union[Mapping[str, Any], Options]] = None,
-    keep_alive: Optional[Union[float, str]] = None,
+    input: str | Sequence[str] = '',
+    truncate: bool | None = None,
+    options: Mapping[str, Any] | Options | None = None,
+    keep_alive: float | str | None = None,
   ) -> EmbedResponse:
     return await self._request(
       EmbedResponse,
@@ -874,9 +871,9 @@ async def embed(
   async def embeddings(
     self,
     model: str = '',
-    prompt: Optional[str] = None,
-    options: Optional[Union[Mapping[str, Any], Options]] = None,
-    keep_alive: Optional[Union[float, str]] = None,
+    prompt: str | None = None,
+    options: Mapping[str, Any] | Options | None = None,
+    keep_alive: float | str | None = None,
   ) -> EmbeddingsResponse:
     """
     Deprecated in favor of `embed`.
@@ -917,7 +914,7 @@ async def pull(
     *,
     insecure: bool = False,
     stream: bool = False,
-  ) -> Union[ProgressResponse, AsyncIterator[ProgressResponse]]:
+  ) -> ProgressResponse | AsyncIterator[ProgressResponse]:
     """
     Raises `ResponseError` if the request could not be fulfilled.
@@ -959,7 +956,7 @@ async def push(
     *,
     insecure: bool = False,
     stream: bool = False,
-  ) -> Union[ProgressResponse, AsyncIterator[ProgressResponse]]:
+  ) -> ProgressResponse | AsyncIterator[ProgressResponse]:
     """
     Raises `ResponseError` if the request could not be fulfilled.
@@ -981,15 +978,15 @@ async def create(
   async def create(
     self,
     model: str,
-    quantize: Optional[str] = None,
-    from_: Optional[str] = None,
-    files: Optional[Dict[str, str]] = None,
-    adapters: Optional[Dict[str, str]] = None,
-    template: Optional[str] = None,
-    license: Optional[Union[str, List[str]]] = None,
-    system: Optional[str] = None,
-    parameters: Optional[Union[Mapping[str, Any], Options]] = None,
-    messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None,
+    quantize: str | None = None,
+    from_: str | None = None,
+    files: dict[str, str] | None = None,
+    adapters: dict[str, str] | None = None,
+    template: str | None = None,
+    license: str | list[str] | None = None,
+    system: str | None = None,
+    parameters: Mapping[str, Any] | Options | None = None,
+    messages: Sequence[Mapping[str, Any] | Message] | None = None,
     *,
-    stream: Literal[True] = True,
+    stream: Literal[False] = False,
   ) -> ProgressResponse: ...
@@ -998,15 +995,15 @@ async def create( async def create( self, model: str, - quantize: Optional[str] = None, - from_: Optional[str] = None, - files: Optional[Dict[str, str]] = None, - adapters: Optional[Dict[str, str]] = None, - template: Optional[str] = None, - license: Optional[Union[str, List[str]]] = None, - system: Optional[str] = None, - parameters: Optional[Union[Mapping[str, Any], Options]] = None, - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + quantize: str | None = None, + from_: str | None = None, + files: dict[str, str] | None = None, + adapters: dict[str, str] | None = None, + template: str | None = None, + license: str | list[str] | None = None, + system: str | None = None, + parameters: Mapping[str, Any] | Options | None = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, stream: Literal[True] = True, ) -> AsyncIterator[ProgressResponse]: ... @@ -1014,18 +1011,18 @@ async def create( async def create( self, model: str, - quantize: Optional[str] = None, - from_: Optional[str] = None, - files: Optional[Dict[str, str]] = None, - adapters: Optional[Dict[str, str]] = None, - template: Optional[str] = None, - license: Optional[Union[str, List[str]]] = None, - system: Optional[str] = None, - parameters: Optional[Union[Mapping[str, Any], Options]] = None, - messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None, + quantize: str | None = None, + from_: str | None = None, + files: dict[str, str] | None = None, + adapters: dict[str, str] | None = None, + template: str | None = None, + license: str | list[str] | None = None, + system: str | None = None, + parameters: Mapping[str, Any] | Options | None = None, + messages: Sequence[Mapping[str, Any] | Message] | None = None, *, stream: bool = False, - ) -> Union[ProgressResponse, AsyncIterator[ProgressResponse]]: + ) -> ProgressResponse | AsyncIterator[ProgressResponse]: """ Raises `ResponseError` if the request could not be fulfilled. 
@@ -1052,7 +1049,7 @@ async def create( stream=stream, ) - async def create_blob(self, path: Union[str, Path]) -> str: + async def create_blob(self, path: str | Path) -> str: sha256sum = sha256() with open(path, 'rb') as r: while True: @@ -1125,24 +1122,24 @@ async def ps(self) -> ProcessResponse: ) -def _copy_images(images: Optional[Sequence[Union[Image, Any]]]) -> Iterator[Image]: +def _copy_images(images: Sequence[Image | Any] | None) -> Iterator[Image]: for image in images or []: yield image if isinstance(image, Image) else Image(value=image) -def _copy_messages(messages: Optional[Sequence[Union[Mapping[str, Any], Message]]]) -> Iterator[Message]: +def _copy_messages(messages: Sequence[Mapping[str, Any] | Message] | None) -> Iterator[Message]: for message in messages or []: yield Message.model_validate( {k: [image for image in _copy_images(v)] if k == 'images' else v for k, v in dict(message).items() if v}, ) -def _copy_tools(tools: Optional[Sequence[Union[Mapping[str, Any], Tool, Callable]]] = None) -> Iterator[Tool]: +def _copy_tools(tools: Sequence[Mapping[str, Any] | Tool | Callable] | None = None) -> Iterator[Tool]: for unprocessed_tool in tools or []: yield convert_function_to_tool(unprocessed_tool) if callable(unprocessed_tool) else Tool.model_validate(unprocessed_tool) -def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]: +def _as_path(s: str | PathLike | None) -> Path | None: if isinstance(s, str) or isinstance(s, Path): try: if (p := Path(s)).exists(): @@ -1152,7 +1149,7 @@ def _as_path(s: Optional[Union[str, PathLike]]) -> Union[Path, None]: return None -def _parse_host(host: Optional[str]) -> str: +def _parse_host(host: str | None) -> str: """ >>> _parse_host(None) 'http://127.0.0.1:11434' diff --git a/ollama/_types.py b/ollama/_types.py index 710c536..8715e6b 100644 --- a/ollama/_types.py +++ b/ollama/_types.py @@ -1,8 +1,10 @@ +from __future__ import annotations + import json from base64 import b64decode, b64encode from datetime import datetime from pathlib import Path -from typing import Any, Dict, List, Mapping, Optional, Sequence, Union +from typing import Any, Mapping, Sequence from pydantic import ( BaseModel, @@ -102,38 +104,38 @@ def get(self, key: str, default: Any = None) -> Any: class Options(SubscriptableBaseModel): # load time options - numa: Optional[bool] = None - num_ctx: Optional[int] = None - num_batch: Optional[int] = None - num_gpu: Optional[int] = None - main_gpu: Optional[int] = None - low_vram: Optional[bool] = None - f16_kv: Optional[bool] = None - logits_all: Optional[bool] = None - vocab_only: Optional[bool] = None - use_mmap: Optional[bool] = None - use_mlock: Optional[bool] = None - embedding_only: Optional[bool] = None - num_thread: Optional[int] = None + numa: bool | None = None + num_ctx: int | None = None + num_batch: int | None = None + num_gpu: int | None = None + main_gpu: int | None = None + low_vram: bool | None = None + f16_kv: bool | None = None + logits_all: bool | None = None + vocab_only: bool | None = None + use_mmap: bool | None = None + use_mlock: bool | None = None + embedding_only: bool | None = None + num_thread: int | None = None # runtime options - num_keep: Optional[int] = None - seed: Optional[int] = None - num_predict: Optional[int] = None - top_k: Optional[int] = None - top_p: Optional[float] = None - tfs_z: Optional[float] = None - typical_p: Optional[float] = None - repeat_last_n: Optional[int] = None - temperature: Optional[float] = None - repeat_penalty: Optional[float] = None - presence_penalty: 
Optional[float] = None - frequency_penalty: Optional[float] = None - mirostat: Optional[int] = None - mirostat_tau: Optional[float] = None - mirostat_eta: Optional[float] = None - penalize_newline: Optional[bool] = None - stop: Optional[Sequence[str]] = None + num_keep: int | None = None + seed: int | None = None + num_predict: int | None = None + top_k: int | None = None + top_p: float | None = None + tfs_z: float | None = None + typical_p: float | None = None + repeat_last_n: int | None = None + temperature: float | None = None + repeat_penalty: float | None = None + presence_penalty: float | None = None + frequency_penalty: float | None = None + mirostat: int | None = None + mirostat_tau: float | None = None + mirostat_eta: float | None = None + penalize_newline: bool | None = None + stop: Sequence[str] | None = None class BaseRequest(SubscriptableBaseModel): @@ -142,23 +144,23 @@ class BaseRequest(SubscriptableBaseModel): class BaseStreamableRequest(BaseRequest): - stream: Optional[bool] = None + stream: bool | None = None 'Stream response.' class BaseGenerateRequest(BaseStreamableRequest): - options: Optional[Union[Mapping[str, Any], Options]] = None + options: Mapping[str, Any] | Options | None = None 'Options to use for the request.' - format: Optional[Union[Literal['', 'json'], JsonSchemaValue]] = None + format: Literal['', 'json'] | JsonSchemaValue | None = None 'Format of the response.' - keep_alive: Optional[Union[float, str]] = None + keep_alive: float | str | None = None 'Keep model alive for the specified duration.' class Image(BaseModel): - value: Union[str, bytes, Path] + value: str | bytes | Path @model_serializer def serialize_model(self): @@ -186,56 +188,56 @@ def serialize_model(self): class GenerateRequest(BaseGenerateRequest): - prompt: Optional[str] = None + prompt: str | None = None 'Prompt to generate response from.' - suffix: Optional[str] = None + suffix: str | None = None 'Suffix to append to the response.' - system: Optional[str] = None + system: str | None = None 'System prompt to prepend to the prompt.' - template: Optional[str] = None + template: str | None = None 'Template to use for the response.' - context: Optional[Sequence[int]] = None + context: Sequence[int] | None = None 'Tokenized history to use for the response.' - raw: Optional[bool] = None + raw: bool | None = None - images: Optional[Sequence[Image]] = None + images: Sequence[Image] | None = None 'Image data for multimodal models.' class BaseGenerateResponse(SubscriptableBaseModel): - model: Optional[str] = None + model: str | None = None 'Model used to generate response.' - created_at: Optional[str] = None + created_at: str | None = None 'Time when the request was created.' - done: Optional[bool] = None + done: bool | None = None 'True if response is complete, otherwise False. Useful for streaming to detect the final response.' - done_reason: Optional[str] = None + done_reason: str | None = None 'Reason for completion. Only present when done is True.' - total_duration: Optional[int] = None + total_duration: int | None = None 'Total duration in nanoseconds.' - load_duration: Optional[int] = None + load_duration: int | None = None 'Load duration in nanoseconds.' - prompt_eval_count: Optional[int] = None + prompt_eval_count: int | None = None 'Number of tokens evaluated in the prompt.' - prompt_eval_duration: Optional[int] = None + prompt_eval_duration: int | None = None 'Duration of evaluating the prompt in nanoseconds.' 
-  eval_count: Optional[int] = None
+  eval_count: int | None = None
   'Number of tokens evaluated in inference.'

-  eval_duration: Optional[int] = None
+  eval_duration: int | None = None
   'Duration of evaluating inference in nanoseconds.'


@@ -247,7 +249,7 @@ class GenerateResponse(BaseGenerateResponse):
   response: str
   'Response content. When streaming, this contains a fragment of the response.'

-  context: Optional[Sequence[int]] = None
+  context: Sequence[int] | None = None
   'Tokenized history up to the point of the response.'


@@ -259,10 +261,10 @@ class Message(SubscriptableBaseModel):
   role: Literal['user', 'assistant', 'system', 'tool']
-  "Assumed role of the message. Response messages has role 'assistant' or 'tool'."
+  "Assumed role of the message. Response messages have role 'assistant' or 'tool'."

-  content: Optional[str] = None
-  'Content of the message. Response messages contains message fragments when streaming.'
+  content: str | None = None
+  'Content of the message. Response messages contain message fragments when streaming.'

-  images: Optional[Sequence[Image]] = None
+  images: Sequence[Image] | None = None
   """
   Optional list of image data for multimodal models.
@@ -293,41 +295,41 @@ class Function(SubscriptableBaseModel):
   function: Function
   'Function to be called.'

-  tool_calls: Optional[Sequence[ToolCall]] = None
+  tool_calls: Sequence[ToolCall] | None = None
   """
-  Tools calls to be made by the model.
+  Tool calls to be made by the model.
   """


 class Tool(SubscriptableBaseModel):
-  type: Optional[Literal['function']] = 'function'
+  type: Literal['function'] | None = 'function'

   class Function(SubscriptableBaseModel):
-    name: Optional[str] = None
-    description: Optional[str] = None
+    name: str | None = None
+    description: str | None = None

     class Parameters(SubscriptableBaseModel):
-      type: Optional[Literal['object']] = 'object'
-      required: Optional[Sequence[str]] = None
+      type: Literal['object'] | None = 'object'
+      required: Sequence[str] | None = None

       class Property(SubscriptableBaseModel):
         model_config = ConfigDict(arbitrary_types_allowed=True)

-        type: Optional[str] = None
-        description: Optional[str] = None
+        type: str | None = None
+        description: str | None = None

-      properties: Optional[Mapping[str, Property]] = None
+      properties: Mapping[str, Property] | None = None

-    parameters: Optional[Parameters] = None
+    parameters: Parameters | None = None

-  function: Optional[Function] = None
+  function: Function | None = None


 class ChatRequest(BaseGenerateRequest):
-  messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None
+  messages: Sequence[Mapping[str, Any] | Message] | None = None
   'Messages to chat with.'

-  tools: Optional[Sequence[Tool]] = None
+  tools: Sequence[Tool] | None = None
   'Tools to use for the chat.'
@@ -341,16 +343,16 @@ class ChatResponse(BaseGenerateResponse):


 class EmbedRequest(BaseRequest):
-  input: Union[str, Sequence[str]]
+  input: str | Sequence[str]
   'Input text to embed.'

-  truncate: Optional[bool] = None
+  truncate: bool | None = None
   'Truncate the input to the maximum token length.'

-  options: Optional[Union[Mapping[str, Any], Options]] = None
+  options: Mapping[str, Any] | Options | None = None
   'Options to use for the request.'

-  keep_alive: Optional[Union[float, str]] = None
+  keep_alive: float | str | None = None


 class EmbedResponse(BaseGenerateResponse):
@@ -363,13 +365,13 @@ class EmbedResponse(BaseGenerateResponse):


 class EmbeddingsRequest(BaseRequest):
-  prompt: Optional[str] = None
+  prompt: str | None = None
   'Prompt to generate embeddings from.'

-  options: Optional[Union[Mapping[str, Any], Options]] = None
+  options: Mapping[str, Any] | Options | None = None
   'Options to use for the request.'
-  keep_alive: Optional[Union[float, str]] = None
+  keep_alive: float | str | None = None


 class EmbeddingsResponse(SubscriptableBaseModel):
@@ -386,7 +388,7 @@ class PullRequest(BaseStreamableRequest):
   """
   Request to pull the model.
   """

-  insecure: Optional[bool] = None
+  insecure: bool | None = None
   'Allow insecure (HTTP) connections.'


@@ -395,7 +397,7 @@ class PushRequest(BaseStreamableRequest):
   """
-  Request to pull the model.
+  Request to push the model.
   """

-  insecure: Optional[bool] = None
+  insecure: bool | None = None
   'Allow insecure (HTTP) connections.'


@@ -410,33 +412,33 @@ def serialize_model(self, nxt):
   """
   Request to create a new model.
   """
-  quantize: Optional[str] = None
-  from_: Optional[str] = None
-  files: Optional[Dict[str, str]] = None
-  adapters: Optional[Dict[str, str]] = None
-  template: Optional[str] = None
-  license: Optional[Union[str, List[str]]] = None
-  system: Optional[str] = None
-  parameters: Optional[Union[Mapping[str, Any], Options]] = None
-  messages: Optional[Sequence[Union[Mapping[str, Any], Message]]] = None
+  quantize: str | None = None
+  from_: str | None = None
+  files: dict[str, str] | None = None
+  adapters: dict[str, str] | None = None
+  template: str | None = None
+  license: str | list[str] | None = None
+  system: str | None = None
+  parameters: Mapping[str, Any] | Options | None = None
+  messages: Sequence[Mapping[str, Any] | Message] | None = None


 class ModelDetails(SubscriptableBaseModel):
-  parent_model: Optional[str] = None
-  format: Optional[str] = None
-  family: Optional[str] = None
-  families: Optional[Sequence[str]] = None
-  parameter_size: Optional[str] = None
-  quantization_level: Optional[str] = None
+  parent_model: str | None = None
+  format: str | None = None
+  family: str | None = None
+  families: Sequence[str] | None = None
+  parameter_size: str | None = None
+  quantization_level: str | None = None


 class ListResponse(SubscriptableBaseModel):
   class Model(SubscriptableBaseModel):
-    model: Optional[str] = None
-    modified_at: Optional[datetime] = None
-    digest: Optional[str] = None
-    size: Optional[ByteSize] = None
-    details: Optional[ModelDetails] = None
+    model: str | None = None
+    modified_at: datetime | None = None
+    digest: str | None = None
+    size: ByteSize | None = None
+    details: ModelDetails | None = None

   models: Sequence[Model]
   'List of models.'
@@ -461,13 +463,13 @@ class CopyRequest(BaseModel): class StatusResponse(SubscriptableBaseModel): - status: Optional[str] = None + status: str | None = None class ProgressResponse(StatusResponse): - completed: Optional[int] = None - total: Optional[int] = None - digest: Optional[str] = None + completed: int | None = None + total: int | None = None + digest: str | None = None class ShowRequest(BaseRequest): @@ -477,30 +479,30 @@ class ShowRequest(BaseRequest): class ShowResponse(SubscriptableBaseModel): - modified_at: Optional[datetime] = None + modified_at: datetime | None = None - template: Optional[str] = None + template: str | None = None - modelfile: Optional[str] = None + modelfile: str | None = None - license: Optional[str] = None + license: str | None = None - details: Optional[ModelDetails] = None + details: ModelDetails | None = None - modelinfo: Optional[Mapping[str, Any]] = Field(alias='model_info') + modelinfo: Mapping[str, Any] | None = Field(alias='model_info') - parameters: Optional[str] = None + parameters: str | None = None class ProcessResponse(SubscriptableBaseModel): class Model(SubscriptableBaseModel): - model: Optional[str] = None - name: Optional[str] = None - digest: Optional[str] = None - expires_at: Optional[datetime] = None - size: Optional[ByteSize] = None - size_vram: Optional[ByteSize] = None - details: Optional[ModelDetails] = None + model: str | None = None + name: str | None = None + digest: str | None = None + expires_at: datetime | None = None + size: ByteSize | None = None + size_vram: ByteSize | None = None + details: ModelDetails | None = None models: Sequence[Model] diff --git a/ollama/_utils.py b/ollama/_utils.py index 2ea58ea..74bf2fd 100644 --- a/ollama/_utils.py +++ b/ollama/_utils.py @@ -3,14 +3,14 @@ import inspect import re from collections import defaultdict -from typing import Callable, Union +from typing import Callable import pydantic from ollama._types import Tool -def _parse_docstring(doc_string: Union[str, None]) -> dict[str, str]: +def _parse_docstring(doc_string: str | None) -> dict[str, str]: parsed_docstring = defaultdict(str) if not doc_string: return parsed_docstring diff --git a/poetry.lock b/poetry.lock index d896857..1671db4 100644 --- a/poetry.lock +++ b/poetry.lock @@ -151,6 +151,22 @@ tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.1 [package.extras] toml = ["tomli"] +[[package]] +name = "eval-type-backport" +version = "0.2.2" +description = "Like `typing._eval_type`, but lets older Python versions use newer typing features." 
+optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version <= \"3.10\"" +files = [ + {file = "eval_type_backport-0.2.2-py3-none-any.whl", hash = "sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a"}, + {file = "eval_type_backport-0.2.2.tar.gz", hash = "sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1"}, +] + +[package.extras] +tests = ["pytest"] + [[package]] name = "exceptiongroup" version = "1.2.2" @@ -639,4 +655,4 @@ watchdog = ["watchdog (>=2.3)"] [metadata] lock-version = "2.1" python-versions = "^3.8" -content-hash = "ff70c562172e38058111703d8219e18a0ed367e8e3ff647d7eff457d3bf3204e" +content-hash = "827c087b6a50d028881fc4eb4267f49a31406dd1289ce9485602d839bcc0584b" diff --git a/pyproject.toml b/pyproject.toml index fc98103..42acaa4 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,7 @@ repository = "https://github.com/ollama/ollama-python" python = "^3.8" httpx = ">=0.27,<0.29" pydantic = "^2.9.0" +eval_type_backport = { version = "^0.2.2", markers = "python_version <= '3.10'" } [tool.poetry.requires-plugins] poetry-plugin-export = ">=1.8" diff --git a/requirements.txt b/requirements.txt index b7187f8..4a6bfca 100644 --- a/requirements.txt +++ b/requirements.txt @@ -7,6 +7,9 @@ anyio==4.5.2 ; python_version >= "3.8" and python_version < "4.0" \ certifi==2024.8.30 ; python_version >= "3.8" and python_version < "4.0" \ --hash=sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8 \ --hash=sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9 +eval-type-backport==0.2.2 ; python_version >= "3.8" and python_version <= "3.10" \ + --hash=sha256:cb6ad7c393517f476f96d456d0412ea80f0a8cf96f6892834cd9340149111b0a \ + --hash=sha256:f0576b4cf01ebb5bd358d02314d31846af5e07678387486e2c798af0e7d849c1 exceptiongroup==1.2.2 ; python_version >= "3.8" and python_version < "3.11" \ --hash=sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b \ --hash=sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc
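
Note: the shape of the change, in miniature. With `from __future__ import
annotations` (PEP 563) every annotation is stored as a string instead of being
evaluated at definition time, so PEP 604 unions (`X | Y`) and PEP 585 builtin
generics (`list[str]`, `dict[str, str]`, `type[T]`) parse even on Python 3.8.
A minimal before/after sketch, with hypothetical names:

  from __future__ import annotations  # annotations stay strings at runtime (PEP 563)

  from typing import List, Optional, Union

  # Before: typing-module generics, as removed throughout this patch.
  def first_name_old(raw: Optional[Union[str, List[str]]]) -> Optional[str]:
    return raw if isinstance(raw, str) else (raw[0] if raw else None)

  # After: the same signature in PEP 604/585 syntax; valid on 3.8+ only
  # because the future import keeps the expression unevaluated.
  def first_name_new(raw: str | list[str] | None) -> str | None:
    return raw if isinstance(raw, str) else (raw[0] if raw else None)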
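
Note: the `stream` overload pattern that recurs throughout `_client.py`,
reduced to a toy example (names hypothetical). Annotating the non-streaming
overload `Literal[False]` and the streaming one `Literal[True]` lets a type
checker narrow the return type from the argument at the call site instead of
falling back to the `T | Iterator[T]` union on the implementation:

  from __future__ import annotations

  from typing import Iterator, Literal, overload

  @overload
  def fetch(*, stream: Literal[False] = False) -> str: ...
  @overload
  def fetch(*, stream: Literal[True]) -> Iterator[str]: ...

  def fetch(*, stream: bool = False) -> str | Iterator[str]:
    # fetch() resolves to the first overload -> str;
    # fetch(stream=True) resolves to the second -> Iterator[str].
    if stream:
      return iter(('chunk-1', 'chunk-2'))
    return 'chunk-1' + 'chunk-2'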
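
Note: why `eval_type_backport` joins the dependencies for older interpreters.
pydantic has to evaluate the now-stringized annotations at runtime to build
its validators, and evaluating 'int | None' raises TypeError on Python 3.8
and 3.9, where `types.UnionType` does not exist yet; when eval-type-backport
is installed, pydantic falls back to it and the new syntax keeps working
there. A sketch, with a hypothetical model:

  from __future__ import annotations

  from pydantic import BaseModel

  class Example(BaseModel):
    # Stored as the string 'int | None' until pydantic evaluates it; on
    # Python 3.8/3.9 that evaluation only succeeds via eval-type-backport.
    num_ctx: int | None = None

  print(Example(num_ctx=2048))  # num_ctx=2048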