Refactor API endpoint chat handler into its own function (#2157)

Remove legacy `chatWithWorkspace` and clean up `index.js`
This commit is contained in:
Timothy Carambat
2024-08-21 14:47:06 -07:00
committed by GitHub
parent f7756d4758
commit 1f96b837b3
5 changed files with 514 additions and 231 deletions

View File

@@ -3,7 +3,6 @@ const { WorkspaceThread } = require("../../../models/workspaceThread");
const { Workspace } = require("../../../models/workspace");
const { validApiKey } = require("../../../utils/middleware/validApiKey");
const { reqBody, multiUserMode } = require("../../../utils/http");
const { chatWithWorkspace } = require("../../../utils/chats");
const {
streamChatWithWorkspace,
VALID_CHAT_MODE,
@@ -16,6 +15,7 @@ const {
} = require("../../../utils/helpers/chat/responses");
const { WorkspaceChats } = require("../../../models/workspaceChats");
const { User } = require("../../../models/user");
const { ApiChatHandler } = require("../../../utils/chats/apiChatHandler");
function apiWorkspaceThreadEndpoints(app) {
if (!app) return;
@@ -405,13 +405,13 @@ function apiWorkspaceThreadEndpoints(app) {
}
const user = userId ? await User.get({ id: Number(userId) }) : null;
const result = await chatWithWorkspace(
const result = await ApiChatHandler.chatSync({
workspace,
message,
mode,
user,
thread
);
thread,
});
await Telemetry.sendTelemetry("sent_chat", {
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",
@@ -556,14 +556,14 @@ function apiWorkspaceThreadEndpoints(app) {
response.setHeader("Connection", "keep-alive");
response.flushHeaders();
await streamChatWithWorkspace(
await ApiChatHandler.streamChat({
response,
workspace,
message,
mode,
user,
thread
);
thread,
});
await Telemetry.sendTelemetry("sent_chat", {
LLMSelection: process.env.LLM_PROVIDER || "openai",
Embedder: process.env.EMBEDDING_ENGINE || "inherit",