mirror of
https://github.com/Mintplex-Labs/anything-llm
synced 2026-04-25 17:15:37 +02:00
feat: add optional API key support for Lemonade provider (#5281)
* add API key param to Lemonade LLM Provider and Embedding Provider * add LEMONADE_LLM_API_KEY to .env.example * add api key to aibitat provider * fix api key from being sent to frontend * fix tooltip id * add null fallback for `apiKey` * remove console log * add missing api keys --------- Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
This commit is contained in:
@@ -180,6 +180,7 @@ SIG_SALT='salt' # Please generate random string at least 32 chars long.
|
||||
# LEMONADE_LLM_BASE_PATH='http://127.0.0.1:8000'
|
||||
# LEMONADE_LLM_MODEL_PREF='Llama-3.2-1B-Instruct-GGUF'
|
||||
# LEMONADE_LLM_MODEL_TOKEN_LIMIT=8192
|
||||
# LEMONADE_LLM_API_KEY=
|
||||
|
||||
###########################################
|
||||
######## Embedding API SELECTION ##########
|
||||
@@ -452,4 +453,4 @@ TTS_PROVIDER="native"
|
||||
# Set to "true" to enable. This can reduce token costs by 80% when you have
|
||||
# many tools/MCP servers enabled.
|
||||
# AGENT_SKILL_RERANKER_ENABLED="true"
|
||||
# AGENT_SKILL_RERANKER_TOP_N=15 # (optional) Number of top tools to keep after reranking (default: 15)
|
||||
# AGENT_SKILL_RERANKER_TOP_N=15 # (optional) Number of top tools to keep after reranking (default: 15)
|
||||
|
||||
@@ -34,6 +34,9 @@ function lemonadeUtilsEndpoints(app) {
|
||||
const lemonadeResponse = await fetch(lemonadeUrl.toString(), {
|
||||
method: "POST",
|
||||
headers: {
|
||||
...(!!process.env.LEMONADE_LLM_API_KEY
|
||||
? { Authorization: `Bearer ${process.env.LEMONADE_LLM_API_KEY}` }
|
||||
: {}),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
@@ -129,6 +132,9 @@ function lemonadeUtilsEndpoints(app) {
|
||||
const lemonadeResponse = await fetch(lemonadeUrl.toString(), {
|
||||
method: "POST",
|
||||
headers: {
|
||||
...(!!process.env.LEMONADE_LLM_API_KEY
|
||||
? { Authorization: `Bearer ${process.env.LEMONADE_LLM_API_KEY}` }
|
||||
: {}),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
|
||||
@@ -714,6 +714,7 @@ const SystemSettings = {
|
||||
|
||||
// Lemonade Keys
|
||||
LemonadeLLMBasePath: process.env.LEMONADE_LLM_BASE_PATH,
|
||||
LemonadeLLMApiKey: !!process.env.LEMONADE_LLM_API_KEY,
|
||||
LemonadeLLMModelPref: process.env.LEMONADE_LLM_MODEL_PREF,
|
||||
LemonadeLLMModelTokenLimit:
|
||||
process.env.LEMONADE_LLM_MODEL_TOKEN_LIMIT || 8192,
|
||||
|
||||
@@ -22,7 +22,7 @@ class LemonadeLLM {
|
||||
process.env.LEMONADE_LLM_BASE_PATH,
|
||||
"openai"
|
||||
),
|
||||
apiKey: null,
|
||||
apiKey: process.env.LEMONADE_LLM_API_KEY ?? null,
|
||||
});
|
||||
|
||||
this.model = modelPreference || process.env.LEMONADE_LLM_MODEL_PREF;
|
||||
@@ -202,7 +202,7 @@ class LemonadeLLM {
|
||||
process.env.LEMONADE_LLM_BASE_PATH,
|
||||
"openai"
|
||||
),
|
||||
apiKey: null,
|
||||
apiKey: process.env.LEMONADE_LLM_API_KEY ?? null,
|
||||
});
|
||||
|
||||
const { labels = [] } = await client.models.retrieve(this.model);
|
||||
@@ -233,14 +233,17 @@ class LemonadeLLM {
|
||||
const endpoint = new URL(parseLemonadeServerEndpoint(basePath, "openai"));
|
||||
endpoint.pathname += "/load";
|
||||
|
||||
console.log(endpoint.toString());
|
||||
|
||||
LemonadeLLM.slog(
|
||||
`Loading model ${model} with context size ${this.promptWindowLimit()}`
|
||||
);
|
||||
await fetch(endpoint.toString(), {
|
||||
method: "POST",
|
||||
headers: { "Content-Type": "application/json" },
|
||||
headers: {
|
||||
...(process.env.LEMONADE_LLM_API_KEY
|
||||
? { Authorization: `Bearer ${process.env.LEMONADE_LLM_API_KEY}` }
|
||||
: {}),
|
||||
"Content-Type": "application/json",
|
||||
},
|
||||
body: JSON.stringify({
|
||||
model_name: String(model),
|
||||
ctx_size: Number(this.promptWindowLimit()),
|
||||
@@ -343,7 +346,14 @@ async function getAllLemonadeModels(basePath = null, task = "chat") {
|
||||
);
|
||||
lemonadeUrl.pathname += "/models";
|
||||
lemonadeUrl.searchParams.append("show_all", "true");
|
||||
await fetch(lemonadeUrl.toString())
|
||||
|
||||
await fetch(lemonadeUrl.toString(), {
|
||||
headers: {
|
||||
...(!!process.env.LEMONADE_LLM_API_KEY
|
||||
? { Authorization: `Bearer ${process.env.LEMONADE_LLM_API_KEY}` }
|
||||
: {}),
|
||||
},
|
||||
})
|
||||
.then((res) => res.json())
|
||||
.then(({ data }) => {
|
||||
data?.forEach((model) => {
|
||||
|
||||
@@ -13,7 +13,7 @@ class LemonadeEmbedder {
|
||||
process.env.EMBEDDING_BASE_PATH,
|
||||
"openai"
|
||||
),
|
||||
apiKey: null,
|
||||
apiKey: process.env.LEMONADE_LLM_API_KEY ?? null,
|
||||
});
|
||||
this.model = process.env.EMBEDDING_MODEL_PREF;
|
||||
|
||||
|
||||
@@ -402,7 +402,7 @@ class Provider {
|
||||
configuration: {
|
||||
baseURL: process.env.LEMONADE_LLM_BASE_PATH,
|
||||
},
|
||||
apiKey: null,
|
||||
apiKey: process.env.LEMONADE_LLM_API_KEY ?? null,
|
||||
...config,
|
||||
});
|
||||
default:
|
||||
|
||||
@@ -27,7 +27,7 @@ class LemonadeProvider extends InheritMultiple([Provider, UnTooled]) {
|
||||
process.env.LEMONADE_LLM_BASE_PATH,
|
||||
"openai"
|
||||
),
|
||||
apiKey: null,
|
||||
apiKey: process.env.LEMONADE_LLM_API_KEY ?? null,
|
||||
maxRetries: 3,
|
||||
});
|
||||
|
||||
|
||||
@@ -829,6 +829,10 @@ const KEY_MAPPING = {
|
||||
envKey: "LEMONADE_LLM_BASE_PATH",
|
||||
checks: [isValidURL],
|
||||
},
|
||||
LemonadeLLMApiKey: {
|
||||
envKey: "LEMONADE_LLM_API_KEY",
|
||||
checks: [],
|
||||
},
|
||||
LemonadeLLMModelPref: {
|
||||
envKey: "LEMONADE_LLM_MODEL_PREF",
|
||||
checks: [isNotEmpty],
|
||||
|
||||
Reference in New Issue
Block a user