
Vals using langchain/llms/openai

import process from "node:process";

export const langchainEx = (async () => {
  const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
  const { PromptTemplate } = await import("https://esm.sh/langchain/prompts");
  const { LLMChain } = await import("https://esm.sh/langchain/chains");
  // Configure the OpenAI LLM; the API key is read from the val's environment.
  const model = new OpenAI({
    temperature: 0.9,
    openAIApiKey: process.env.openai,
    maxTokens: 100,
  }, {});
  // A prompt template with a single input variable, wired into an LLMChain.
  const template = "What is a good name for a company that makes {product}?";
  const prompt = new PromptTemplate({
    template: template,
    inputVariables: ["product"],
  });
  const chain = new LLMChain({ llm: model, prompt: prompt });
  const res = await chain.call({ product: "colorful socks" });
  console.log(res);
  return res;
})();
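
Because the val exports an already-running promise, a consumer only has to await `langchainEx`. The sketch below assumes the consumer lives in the same module (on Val Town you would import the val by its own URL instead) and that the chain resolves to LLMChain's default `{ text: string }` output shape; both are assumptions, not guarantees made by the val above.

// Hypothetical consumer; assumes LLMChain's default "text" output key.
export const companyName = (async () => {
  const res = (await langchainEx) as { text: string };
  return res.text.trim();
})();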
import process from "node:process";
export const streamingTest = (async () => {
const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
// To enable streaming, we pass in `streaming: true` to the LLM constructor.
// Additionally, we pass in a handler for the `handleLLMNewToken` event.
const chat = new OpenAI({
maxTokens: 25,
streaming: true,
openAIApiKey: process.env.OPENAI_API_KEY,
});
const response = await chat.call("Tell me a joke.", undefined, [
{
handleLLMNewToken(token: string) {
console.log({ token });
},
},
]);
console.log(response);
})();
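
Newer langchain releases also accept callbacks directly in the constructor, which avoids threading them through every `call`. A minimal sketch, assuming the `callbacks` constructor option is available in the version esm.sh resolves to:

import process from "node:process";

export const streamingViaConstructor = (async () => {
  const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
  // Assumption: this langchain version supports `callbacks` in the constructor.
  const chat = new OpenAI({
    maxTokens: 25,
    streaming: true,
    openAIApiKey: process.env.OPENAI_API_KEY,
    callbacks: [
      {
        handleLLMNewToken(token: string) {
          console.log({ token });
        },
      },
    ],
  });
  return await chat.call("Tell me a joke.");
})();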
import process from "node:process";

export const langchainEx = (async () => {
  const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
  const { PromptTemplate } = await import("https://esm.sh/langchain/prompts");
  const { LLMChain } = await import("https://esm.sh/langchain/chains");
  const model = new OpenAI({
    temperature: 0.9,
    openAIApiKey: process.env.openai,
    maxTokens: 100,
  });
  const template = "What is a good name for a company that makes {product}?";
  const prompt = new PromptTemplate({
    template: template,
    inputVariables: ["product"],
  });
  const chain = new LLMChain({ llm: model, prompt: prompt });
  const res = await chain.call({ product: "colorful socks" });
  return res;
})();