1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
import process from "node:process";
/**
 * Demonstrates a LangChain ConversationChain whose prompt accepts multiple
 * input keys (vegetables, fruit, meat, menu, question) alongside chat memory.
 *
 * BufferMemory's `inputKey: "question"` tells the memory which of the chain's
 * several inputs holds the human message to record — required here because the
 * prompt has more than one input variable, which would otherwise make memory
 * saving ambiguous.
 *
 * Exported as an immediately-invoked async value (val style), so the export is
 * a Promise that resolves to the chain's result object.
 * Requires OPENAI_API_KEY in the environment.
 */
export const multipleKeysAndMemoryConversationChainExample = (async () => {
  // Dynamic imports from esm.sh keep the val self-contained at runtime.
  const { ChatOpenAI } = await import(
    "https://esm.sh/langchain/chat_models/openai"
  );
  const { BufferMemory } = await import("https://esm.sh/langchain/memory");
  const {
    ChatPromptTemplate,
    SystemMessagePromptTemplate,
    HumanMessagePromptTemplate,
    MessagesPlaceholder,
  } = await import("https://esm.sh/langchain/prompts");
  const { ConversationChain } = await import("https://esm.sh/langchain/chains");
  // temperature 0 for deterministic-as-possible recipe output.
  const llm = new ChatOpenAI({
    modelName: "gpt-3.5-turbo",
    openAIApiKey: process.env.OPENAI_API_KEY,
    temperature: 0,
  });
  const memory = new BufferMemory({
    memoryKey: "chat_history",
    // Which of the multiple chain inputs is the human turn to store.
    inputKey: "question",
    returnMessages: true,
  });
  // Fix: original template misspelled "recipe" as "receipte".
  const prompt = ChatPromptTemplate.fromPromptMessages([
    SystemMessagePromptTemplate.fromTemplate(
      `There are {vegetables}, {fruit} and {meat} sorts available for cooking, create a recipe given human input using some of these ingredients as a basis.
This is the existing menu {menu}, dishes must not include any ingredient already in the existing menu.`,
    ),
    new MessagesPlaceholder("chat_history"),
    HumanMessagePromptTemplate.fromTemplate("{question}"),
  ]);
  const chain = new ConversationChain({ llm, memory, prompt });
  const result = await chain.call({
    vegetables: ["carrot", "potato", "tomato"].join(", "),
    fruit: ["apple", "banana", "orange"].join(", "),
    meat: ["chicken", "beef", "pork"].join(", "),
    menu: ["chicken soup", "beef steak", "pork chop"].join(", "),
    question: "What is a good recipe with the above ingredients?",
  });
  console.log({ result });
  return result;
})();
👆 This is a val. Vals are TypeScript snippets of code, written in the browser and run on our servers. Create scheduled functions, email yourself, and persist small pieces of data — all from the browser.