diff --git a/Cargo.lock b/Cargo.lock
index 5e43bf4..1c0ca32 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -343,7 +343,7 @@ checksum = "baf1de4339761588bc0619e3cbc0120ee582ebb74b53b4efbf79117bd2da40fd"
 
 [[package]]
 name = "chat-prompts"
-version = "0.11.1"
+version = "0.11.2"
 dependencies = [
  "base64 0.22.0",
  "clap",
@@ -530,7 +530,7 @@ dependencies = [
 
 [[package]]
 name = "endpoints"
-version = "0.12.0"
+version = "0.13.0"
 dependencies = [
  "indexmap",
  "serde",
@@ -1150,7 +1150,7 @@ checksum = "78b3ae25bc7c8c38cec158d1f2757ee79e9b3740fbc7ccf0e59e4b08d793fa89"
 
 [[package]]
 name = "llama-core"
-version = "0.14.1"
+version = "0.15.0"
 dependencies = [
  "base64 0.22.0",
  "chat-prompts",
diff --git a/Cargo.toml b/Cargo.toml
index c4e4ffe..c7c866a 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -4,9 +4,9 @@ version = "0.8.2"
 edition = "2021"
 
 [dependencies]
-endpoints = { version = "=0.12.0", path = "/Volumes/Dev/secondstate/me/LlamaEdge/api-server/endpoints" }
-chat-prompts = { version = "=0.11.1", path = "/Volumes/Dev/secondstate/me/LlamaEdge/api-server/chat-prompts" }
-llama-core = { version = "=0.14.1", features = [
+endpoints = { version = "=0.13.0", path = "/Volumes/Dev/secondstate/me/LlamaEdge/api-server/endpoints" }
+chat-prompts = { version = "=0.11.2", path = "/Volumes/Dev/secondstate/me/LlamaEdge/api-server/chat-prompts" }
+llama-core = { version = "=0.15.0", features = [
     "logging",
 ], path = "/Volumes/Dev/secondstate/me/LlamaEdge/api-server/llama-core" }
 futures = { version = "0.3.6", default-features = false, features = ["async-await", "std"] }