mirror of
https://github.com/Mintplex-Labs/anything-llm
synced 2026-04-25 17:15:37 +02:00
Fix: Azure OpenAI model key collision (#5092)
* fix: Migrate the AzureOpenAI model key from OPEN_MODEL_PREF to AZURE_OPENAI_MODEL_PREF to prevent a naming collision with the OpenAI provider; no action is required from current users. * test: add backwards-compatibility tests for the AzureOpenAI model key migration * patch missing env example file * linting --------- Co-authored-by: Timothy Carambat <rambat1010@gmail.com>
This commit is contained in:
@@ -21,7 +21,7 @@ GID='1000'
|
||||
# LLM_PROVIDER='azure'
|
||||
# AZURE_OPENAI_ENDPOINT=
|
||||
# AZURE_OPENAI_KEY=
|
||||
# OPEN_MODEL_PREF='my-gpt35-deployment' # This is the "deployment" on Azure you want to use. Not the base model.
|
||||
# AZURE_OPENAI_MODEL_PREF='my-gpt35-deployment' # This is the "deployment" on Azure you want to use. Not the base model.
|
||||
# EMBEDDING_MODEL_PREF='embedder-model' # This is the "deployment" on Azure you want to use for embeddings. Not the base model. Valid base model is text-embedding-ada-002
|
||||
|
||||
# LLM_PROVIDER='anthropic'
|
||||
|
||||
@@ -18,7 +18,7 @@ SIG_SALT='salt' # Please generate random string at least 32 chars long.
|
||||
# LLM_PROVIDER='azure'
|
||||
# AZURE_OPENAI_ENDPOINT=
|
||||
# AZURE_OPENAI_KEY=
|
||||
# OPEN_MODEL_PREF='my-gpt35-deployment' # This is the "deployment" on Azure you want to use. Not the base model.
|
||||
# AZURE_OPENAI_MODEL_PREF='my-gpt35-deployment' # This is the "deployment" on Azure you want to use. Not the base model.
|
||||
# EMBEDDING_MODEL_PREF='embedder-model' # This is the "deployment" on Azure you want to use for embeddings. Not the base model. Valid base model is text-embedding-ada-002
|
||||
|
||||
# LLM_PROVIDER='anthropic'
|
||||
|
||||
55
server/__tests__/utils/helpers/azureOpenAiModelPref.test.js
Normal file
55
server/__tests__/utils/helpers/azureOpenAiModelPref.test.js
Normal file
@@ -0,0 +1,55 @@
|
||||
/* eslint-env jest */

/**
 * Backwards-compatibility tests for the AzureOpenAI model key migration
 * (OPEN_MODEL_PREF -> AZURE_OPENAI_MODEL_PREF). Existing installs that
 * only have OPEN_MODEL_PREF set must keep working unchanged.
 *
 * Related issue: https://github.com/Mintplex-Labs/anything-llm/issues/3839
 */

describe("AzureOpenAI model key backwards compatibility", () => {
  // Snapshot of the real environment so each test starts from a clean slate
  // and the suite restores it when done.
  const savedEnv = process.env;

  // Re-require the helpers module after env vars are set; jest.resetModules()
  // in beforeEach guarantees a fresh module instance per test.
  const loadHelpers = () =>
    require("../../../utils/helpers/index");

  beforeEach(() => {
    jest.resetModules();
    process.env = { ...savedEnv };
    // Both keys under test must be absent unless a test sets them explicitly.
    delete process.env.AZURE_OPENAI_MODEL_PREF;
    delete process.env.OPEN_MODEL_PREF;
  });

  afterAll(() => {
    // Put the original environment object back for any suites that follow.
    process.env = savedEnv;
  });

  describe("getBaseLLMProviderModel - helpers/index.js", () => {
    test("returns AZURE_OPENAI_MODEL_PREF when set", () => {
      process.env.AZURE_OPENAI_MODEL_PREF = "my-azure-deployment";
      process.env.OPEN_MODEL_PREF = "gpt-4o";
      const { getBaseLLMProviderModel } = loadHelpers();
      const resolved = getBaseLLMProviderModel({ provider: "azure" });
      expect(resolved).toBe("my-azure-deployment");
    });

    test("falls back to OPEN_MODEL_PREF when AZURE_OPENAI_MODEL_PREF is not set (backwards compat)", () => {
      process.env.OPEN_MODEL_PREF = "my-old-azure-deployment";
      const { getBaseLLMProviderModel } = loadHelpers();
      const resolved = getBaseLLMProviderModel({ provider: "azure" });
      expect(resolved).toBe("my-old-azure-deployment");
    });

    test("openai provider still uses OPEN_MODEL_PREF exclusively", () => {
      process.env.OPEN_MODEL_PREF = "gpt-4o";
      process.env.AZURE_OPENAI_MODEL_PREF = "my-azure-deployment";
      const { getBaseLLMProviderModel } = loadHelpers();
      const resolved = getBaseLLMProviderModel({ provider: "openai" });
      expect(resolved).toBe("gpt-4o");
    });

    test("azure and openai return different values when both keys are set", () => {
      process.env.OPEN_MODEL_PREF = "gpt-4o";
      process.env.AZURE_OPENAI_MODEL_PREF = "my-azure-deployment";
      const { getBaseLLMProviderModel } = loadHelpers();
      const azureModel = getBaseLLMProviderModel({ provider: "azure" });
      const openAiModel = getBaseLLMProviderModel({ provider: "openai" });
      expect(azureModel).not.toBe(openAiModel);
    });
  });
});
|
||||
@@ -97,7 +97,8 @@ function getModelTag() {
|
||||
model = process.env.TOGETHER_AI_MODEL_PREF;
|
||||
break;
|
||||
case "azure":
|
||||
model = process.env.OPEN_MODEL_PREF;
|
||||
model =
|
||||
process.env.AZURE_OPENAI_MODEL_PREF || process.env.OPEN_MODEL_PREF;
|
||||
break;
|
||||
case "koboldcpp":
|
||||
model = process.env.KOBOLD_CPP_MODEL_PREF;
|
||||
|
||||
@@ -508,7 +508,8 @@ const SystemSettings = {
|
||||
// Azure + OpenAI Keys
|
||||
AzureOpenAiEndpoint: process.env.AZURE_OPENAI_ENDPOINT,
|
||||
AzureOpenAiKey: !!process.env.AZURE_OPENAI_KEY,
|
||||
AzureOpenAiModelPref: process.env.OPEN_MODEL_PREF,
|
||||
AzureOpenAiModelPref:
|
||||
process.env.AZURE_OPENAI_MODEL_PREF || process.env.OPEN_MODEL_PREF,
|
||||
AzureOpenAiEmbeddingModelPref: process.env.EMBEDDING_MODEL_PREF,
|
||||
AzureOpenAiTokenLimit: process.env.AZURE_OPENAI_TOKEN_LIMIT || 4096,
|
||||
AzureOpenAiModelType: process.env.AZURE_OPENAI_MODEL_TYPE || "default",
|
||||
|
||||
@@ -20,7 +20,10 @@ class AzureOpenAiLLM {
|
||||
apiKey: process.env.AZURE_OPENAI_KEY,
|
||||
baseURL: AzureOpenAiLLM.formatBaseUrl(process.env.AZURE_OPENAI_ENDPOINT),
|
||||
});
|
||||
this.model = modelPreference ?? process.env.OPEN_MODEL_PREF;
|
||||
this.model =
|
||||
modelPreference ||
|
||||
process.env.AZURE_OPENAI_MODEL_PREF ||
|
||||
process.env.OPEN_MODEL_PREF;
|
||||
/*
|
||||
Note: Azure OpenAI deployments do not expose model metadata that would allow us to
|
||||
programmatically detect whether the deployment uses a reasoning model (o1, o1-mini, o3-mini, etc.).
|
||||
@@ -150,7 +153,7 @@ class AzureOpenAiLLM {
|
||||
async getChatCompletion(messages = [], { temperature = 0.7 }) {
|
||||
if (!this.model)
|
||||
throw new Error(
|
||||
"No OPEN_MODEL_PREF ENV defined. This must the name of a deployment on your Azure account for an LLM chat model like GPT-3.5."
|
||||
"No AZURE_OPENAI_MODEL_PREF ENV defined. This must the name of a deployment on your Azure account for an LLM chat model like GPT-3.5."
|
||||
);
|
||||
|
||||
const result = await LLMPerformanceMonitor.measureAsyncFunction(
|
||||
@@ -185,7 +188,7 @@ class AzureOpenAiLLM {
|
||||
async streamGetChatCompletion(messages = [], { temperature = 0.7 }) {
|
||||
if (!this.model)
|
||||
throw new Error(
|
||||
"No OPEN_MODEL_PREF ENV defined. This must the name of a deployment on your Azure account for an LLM chat model like GPT-3.5."
|
||||
"No AZURE_OPENAI_MODEL_PREF ENV defined. This must the name of a deployment on your Azure account for an LLM chat model like GPT-3.5."
|
||||
);
|
||||
|
||||
const measuredStreamRequest = await LLMPerformanceMonitor.measureStream({
|
||||
|
||||
@@ -17,7 +17,10 @@ class AzureOpenAiProvider extends Provider {
|
||||
baseURL: AzureOpenAiLLM.formatBaseUrl(process.env.AZURE_OPENAI_ENDPOINT),
|
||||
});
|
||||
super(client);
|
||||
this.model = config.model ?? process.env.OPEN_MODEL_PREF;
|
||||
this.model =
|
||||
config.model ||
|
||||
process.env.AZURE_OPENAI_MODEL_PREF ||
|
||||
process.env.OPEN_MODEL_PREF;
|
||||
this.verbose = true;
|
||||
}
|
||||
|
||||
|
||||
@@ -268,7 +268,9 @@ class AgentHandler {
|
||||
"mistralai/Mixtral-8x7B-Instruct-v0.1"
|
||||
);
|
||||
case "azure":
|
||||
return process.env.OPEN_MODEL_PREF;
|
||||
return (
|
||||
process.env.AZURE_OPENAI_MODEL_PREF || process.env.OPEN_MODEL_PREF
|
||||
);
|
||||
case "koboldcpp":
|
||||
return process.env.KOBOLD_CPP_MODEL_PREF ?? null;
|
||||
case "localai":
|
||||
|
||||
@@ -440,7 +440,7 @@ function getBaseLLMProviderModel({ provider = null } = {}) {
|
||||
case "openai":
|
||||
return process.env.OPEN_MODEL_PREF;
|
||||
case "azure":
|
||||
return process.env.OPEN_MODEL_PREF;
|
||||
return process.env.AZURE_OPENAI_MODEL_PREF || process.env.OPEN_MODEL_PREF;
|
||||
case "anthropic":
|
||||
return process.env.ANTHROPIC_MODEL_PREF;
|
||||
case "gemini":
|
||||
|
||||
@@ -32,7 +32,7 @@ const KEY_MAPPING = {
|
||||
checks: [isNotEmpty],
|
||||
},
|
||||
AzureOpenAiModelPref: {
|
||||
envKey: "OPEN_MODEL_PREF",
|
||||
envKey: "AZURE_OPENAI_MODEL_PREF",
|
||||
checks: [isNotEmpty],
|
||||
},
|
||||
AzureOpenAiEmbeddingModelPref: {
|
||||
|
||||
Reference in New Issue
Block a user