import process from "node:process";
/**
 * Demo of a LangChain ConversationalRetrievalQAChain:
 * - GPT-4 answers questions over a tiny in-memory HNSWLib vector store;
 * - GPT-3.5-turbo handles the (cheaper) question-rephrasing step.
 *
 * Requires the OPENAI_API_KEY environment variable to be set.
 * The exported value is a Promise that resolves once the query completes.
 */
export const conversationalQAChainEx = (async () => {
  // Dynamic imports from esm.sh so this runs in the val.town environment
  // without a local node_modules.
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain/chat_models/openai"
  );
  const { HNSWLib } = await import(
    "https://esm.sh/langchain/vectorstores/hnswlib"
  );
  const { OpenAIEmbeddings } = await import(
    "https://esm.sh/langchain/embeddings/openai"
  );
  const { ConversationalRetrievalQAChain } = await import(
    "https://esm.sh/langchain/chains"
  );

  // temperature 0 for deterministic answers.
  const gpt35 = new ChatOpenAI({
    openAIApiKey: process.env.OPENAI_API_KEY,
    modelName: "gpt-3.5-turbo",
    temperature: 0,
  });
  const gpt4 = new ChatOpenAI({
    openAIApiKey: process.env.OPENAI_API_KEY,
    modelName: "gpt-4",
    temperature: 0,
  });

  // Toy corpus: each text is paired with a metadata object by index.
  const vectorStore = await HNSWLib.fromTexts(
    ["Hello world", "Bye bye", "hello nice world", "bye", "hi"],
    [{ id: 2 }, { id: 1 }, { id: 3 }, { id: 4 }, { id: 5 }],
    new OpenAIEmbeddings({
      openAIApiKey: process.env.OPENAI_API_KEY,
    }),
  );

  const qaChain = ConversationalRetrievalQAChain.fromLLM(
    gpt4,
    vectorStore.asRetriever(),
    {
      questionGeneratorChainOptions: {
        llm: gpt35, // Need for speed!
      },
    },
  );

  const chatHistory = [];
  const query = `What did the president say about Ketanji Brown Jackson?`;

  // BUGFIX: actually invoke the chain with the question and history.
  // (Previously this awaited the chain object itself, so the query never ran.)
  const response = await qaChain.call({
    question: query,
    chat_history: chatHistory,
  });

  // BUGFIX: log the response value, not the string literal "response".
  console.log(response);

  return response;
})();
// 👆 This is a val. Vals are TypeScript snippets of code, written in the browser and run on our servers. Create scheduled functions, email yourself, and persist small pieces of data — all from the browser.