import fs from "fs";
import { NextRequest, NextResponse } from "next/server";
import { OpenAIEmbeddings, ChatOpenAI } from "@langchain/openai";
import { ConversationalRetrievalQAChain } from "langchain/chains";
import { HNSWLib } from "@langchain/community/vectorstores/hnswlib";
import { RecursiveCharacterTextSplitter } from "@langchain/textsplitters";
import { BufferMemory } from "langchain/memory";
import {
  AIMessage,
  HumanMessage,
  SystemMessage,
} from "@langchain/core/messages";
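
// Helpers referenced below are assumed to be defined elsewhere in this app:
// getRannieServerSession (resolves the user id from the request), getApiKey,
// readFileContent, createDirectoryRecursively, getError, and
// vectorStoreDirectoryProcessing, a module-level Set<string> tracking
// indexes that are still being built.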

// Splits an uploaded file into chunks, embeds them, and creates or updates
// the caller's persisted HNSWLib index on disk.
export const createEmbeddings = async (req: NextRequest) => {
  const { id } = getRannieServerSession(req);
  const form = await req.formData();
  const vectorStoreIndex = form.get("vectorStoreIndex") as string;
  const vectorStoreDirectory = `${process.cwd()}/temp/${id}-${vectorStoreIndex}`;

  // Mark the index as in-flight so qaDocument (below) refuses to query it
  // until the build finishes.
  vectorStoreDirectoryProcessing.add(vectorStoreDirectory);
  try {
    const textSplitter = new RecursiveCharacterTextSplitter({ chunkSize: 1000 });
    const embedding = new OpenAIEmbeddings({
      model: "text-embedding-3-large",
      apiKey: getApiKey(),
    });

    const file = form.get("file") as File;
    const text = await readFileContent(file);
    const docs = await textSplitter.createDocuments([text]);

    let vectorStore: HNSWLib;
    if (fs.existsSync(vectorStoreDirectory)) {
      // The index already exists on disk: load it and append the new chunks.
      vectorStore = await HNSWLib.load(vectorStoreDirectory, embedding);
      await vectorStore.addDocuments(docs);
    } else {
      createDirectoryRecursively(vectorStoreDirectory);
      vectorStore = await HNSWLib.fromDocuments(docs, embedding);
    }
    await vectorStore.save(vectorStoreDirectory);

    return true;
  } finally {
    // Clear the in-flight flag even if reading or embedding throws, so the
    // index doesn't stay locked forever.
    vectorStoreDirectoryProcessing.delete(vectorStoreDirectory);
  }
};
 
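// A minimal sketch of how these handlers might be mounted as App Router
// endpoints; the file paths below are assumptions, not part of this module:
//
//   // app/api/embeddings/route.ts
//   export async function POST(req: NextRequest) {
//     const ok = await createEmbeddings(req);
//     return NextResponse.json({ ok });
//   }
//
//   // app/api/qa/route.ts (qaDocument already returns a Response)
//   export const POST = qaDocument;
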
// Answers a question against the user's persisted index, streaming the
// reply back as OpenAI-style server-sent events.
export async function qaDocument(req: NextRequest) {
  const requestBody = await req.clone().json();
  const { id } = getRannieServerSession(req);
  const { vectorStoreIndex } = requestBody ?? {};
  const vectorStoreDirectory = `${process.cwd()}/temp/${id}-${vectorStoreIndex}`;
 
  // Reject the request if the index doesn't exist yet or is still being built.
  if (
    !fs.existsSync(vectorStoreDirectory) ||
    vectorStoreDirectoryProcessing.has(vectorStoreDirectory)
  ) {
    return NextResponse.json(getError(30004), {
      status: 400,
    });
  }

  const model = new ChatOpenAI({
    model: "gpt-4o",
    streaming: true,
    streamUsage: true,
    apiKey: getApiKey(),
  });
  // Must be the same embedding model the index was built with, otherwise
  // query vectors won't match the stored ones.
  const embedding = new OpenAIEmbeddings({
    model: "text-embedding-3-large",
    apiKey: getApiKey(),
  });
  const vectorStore = await HNSWLib.load(vectorStoreDirectory, embedding);
 
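  // ConversationalRetrievalQAChain condenses the chat history and the new
  // question into a standalone query before retrieval, so the history
  // replayed below also shapes which chunks get retrieved.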
  const bufferMemory = new BufferMemory({
    memoryKey: "chat_history",
  });
  // Replay the earlier turns into memory; the last user message becomes
  // the question for this turn.
  let question = "";
  for (let i = 0; i < requestBody.messages.length; i++) {
    const message = requestBody.messages[i];
    if (message.role === "system") {
      await bufferMemory.chatHistory.addMessage(
        new SystemMessage({ content: message.content || "" }),
      );
    } else if (message.role === "user") {
      if (i === requestBody.messages.length - 1) {
        question = message.content as string;
      } else {
        await bufferMemory.chatHistory.addMessage(
          new HumanMessage({ content: message.content || "" }),
        );
      }
    } else if (message.role === "assistant") {
      await bufferMemory.chatHistory.addMessage(
        new AIMessage({ content: message.content || "" }),
      );
    }
  }
 
  const chain = ConversationalRetrievalQAChain.fromLLM(
    model,
    vectorStore.asRetriever(),
    {
      memory: bufferMemory,
      // "stuff" concatenates all retrieved chunks into a single prompt.
      qaChainOptions: {
        type: "stuff",
      },
    },
  );
  // Bridge the chain's token callbacks into an SSE-formatted ReadableStream.
  const encoder = new TextEncoder();
  const body = new ReadableStream({
    start(controller) {
      chain
        .invoke(
          { question },
          {
            callbacks: [
              {
                // Forward each generated token as an OpenAI-style SSE chunk.
                handleLLMNewToken: (token) =>
                  controller.enqueue(
                    encoder.encode(
                      `data: ${JSON.stringify({
                        choices: [{ delta: { content: token } }],
                      })}\n\n`,
                    ),
                  ),
              },
            ],
          },
        )
        .then(() => {
          // SSE messages are delimited by a blank line, [DONE] included.
          controller.enqueue(encoder.encode("data: [DONE]\n\n"));
          controller.close();
        })
        .catch((err) => {
          controller.error(err);
        });
    },
  });
  const newHeaders = new Headers();
  newHeaders.set("Content-Type", "text/event-stream");
  newHeaders.set("Cache-Control", "no-cache");
  newHeaders.set("Connection", "keep-alive");
  return new Response(body, {
    headers: newHeaders,
  });
}
 
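// An illustrative client-side reader for the SSE stream above; the endpoint
// path and payload shape are assumptions, and a production reader should
// buffer partial events across chunk boundaries:
//
//   const res = await fetch("/api/qa", {
//     method: "POST",
//     body: JSON.stringify({ vectorStoreIndex, messages }),
//   });
//   const reader = res.body!.pipeThrough(new TextDecoderStream()).getReader();
//   for (;;) {
//     const { value, done } = await reader.read();
//     if (done) break;
//     for (const event of value.split("\n\n")) {
//       if (!event.startsWith("data: ") || event === "data: [DONE]") continue;
//       const { choices } = JSON.parse(event.slice("data: ".length));
//       onToken(choices[0]?.delta?.content ?? ""); // onToken is hypothetical
//     }
//   }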