commit fc910862d0
parent fbaa2800c6
@@ -13,7 +13,7 @@ export async function POST(req: Request) {
   const { text, fileId, embeddingsProvider, fileExtension } = json as {
     text: string
     fileId: string
-    embeddingsProvider: "openai" | "local"
+    embeddingsProvider: "openai" | "local" | "bge-m3"
     fileExtension: string
   }
 
@@ -82,6 +82,8 @@ export async function POST(req: Request) {
     })
 
     embeddings = await Promise.all(embeddingPromises)
+  } else if (embeddingsProvider === "bge-m3") {
+    // Example: call your own BGE-M3 API or a local function here
   }
 
   const file_items = chunks.map((chunk, index) => ({
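The branch added above is only a stub; the committed comment literally says "Example: call your own BGE-M3 API or a local function". A minimal sketch of how it could be filled in, assuming BGE-M3 is served behind an OpenAI-compatible /v1/embeddings endpoint and that each chunk exposes a content field as the existing providers use. The BGE_M3_API_URL variable, the embedWithBgeM3 helper, and the response shape are illustrative assumptions, not part of this commit:

// Hypothetical helper: embed a batch of texts with a self-hosted BGE-M3 server.
// Assumes an OpenAI-style embeddings endpoint; adapt the request/response
// shape to whatever your deployment actually returns.
async function embedWithBgeM3(texts: string[]): Promise<number[][]> {
  const response = await fetch(`${process.env.BGE_M3_API_URL}/v1/embeddings`, {
    method: "POST",
    headers: { "Content-Type": "application/json" },
    body: JSON.stringify({ model: "bge-m3", input: texts })
  })

  if (!response.ok) {
    throw new Error(`BGE-M3 embedding request failed: ${response.status}`)
  }

  // Expected (assumed) shape: { data: [{ embedding: number[] }, ...] }
  const json = await response.json()
  return json.data.map((item: { embedding: number[] }) => item.embedding)
}

The branch body itself would then be a single assignment that mirrors the other providers, e.g. embeddings = await embedWithBgeM3(chunks.map(chunk => chunk.content)).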
@@ -136,8 +136,11 @@ export async function POST(req: Request) {
     })
 
     embeddings = await Promise.all(embeddingPromises)
+  } else if (embeddingsProvider === "bge-m3") {
+    // Example: call your own BGE-M3 API or a local function here
   }
+
 
   const file_items = chunks.map((chunk, index) => ({
     file_id,
     user_id: profile.user_id,
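This is the same stub a second time (the file name is not visible in this rendering, only that it is another POST handler that builds embeddings from chunks). Under the same assumptions it could reuse the hypothetical embedWithBgeM3 helper sketched above: embeddings = await embedWithBgeM3(chunks.map(chunk => chunk.content)).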
@@ -10,7 +10,7 @@ export async function POST(request: Request) {
   const { userInput, fileIds, embeddingsProvider, sourceCount } = json as {
     userInput: string
     fileIds: string[]
-    embeddingsProvider: "openai" | "local"
+    embeddingsProvider: "openai" | "local" | "bge-m3"
     sourceCount: number
   }
 
@@ -84,6 +84,8 @@ export async function POST(request: Request) {
     }
 
     chunks = localFileItems
+  } else if (embeddingsProvider === "bge-m3") {
+    // Example: call your own BGE-M3 API or a local function here
   }
 
   const mostSimilarChunks = chunks?.sort(
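On the retrieval side the stub needs two things: an embedding for userInput, and a similarity search over rows that were embedded with the same provider. A minimal sketch, reusing the hypothetical embedWithBgeM3 helper from above and assuming a supabaseAdmin client is in scope as in the neighbouring branches. The match_file_items_bge_m3 RPC is also an assumption; it would need its own database migration, since BGE-M3's dense vectors are 1024-dimensional and the existing columns cannot be reused as-is:

  } else if (embeddingsProvider === "bge-m3") {
    // Embed the query with the same (hypothetical) helper used at ingestion time.
    const [queryEmbedding] = await embedWithBgeM3([userInput])

    // Hypothetical RPC mirroring the existing match functions; it only works
    // once a corresponding migration (vector column + SQL function) exists.
    const { data: bgeFileItems, error } = await supabaseAdmin.rpc(
      "match_file_items_bge_m3",
      {
        query_embedding: queryEmbedding as any,
        match_count: sourceCount,
        file_ids: fileIds
      }
    )

    if (error) {
      throw error
    }

    chunks = bgeFileItems
  }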
@@ -143,6 +143,7 @@ export const WorkspaceSettings: FC<WorkspaceSettingsProps> = ({}) => {
       embeddingsProvider: defaultChatSettings.embeddingsProvider as
         | "openai"
         | "local"
+        | "bge-m3"
     })
   }
 
@@ -8,7 +8,7 @@ export interface ChatSettings {
   contextLength: number
   includeProfileContext: boolean
   includeWorkspaceInstructions: boolean
-  embeddingsProvider: "openai" | "local"
+  embeddingsProvider: "openai" | "local" | "bge-m3"
 }
 
 export interface ChatPayload {