import { fetch } from "https://esm.town/v/std/fetch";
import { OpenAI } from "https://esm.town/v/std/openai";
import { z } from "npm:zod";
// …
}

async function callOpenAI(sysP: string, userP: string, mid: string, tid: string, log: LogFn): Promise<string | null> {
  log("DB", "OpenAI", `Call tid=${tid}`, { sL: sysP.length, uL: userP.length }, mid, tid);
  try {
    // @ts-ignore
    const oai = new OpenAI();
    const comp = await oai.chat.completions.create({
      model: "gpt-4o-mini", // Consider gpt-4o for more complex tasks if budget allows
      // …
    const usg = comp.usage;
    if (!resT) {
      log("WN", "OpenAI", `No text tid=${tid}.`, { usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
      return null;
    }
    log("IN", "OpenAI", `OK tid=${tid}`, { rL: resT.length, usg, fin: comp.choices[0]?.finish_reason }, mid, tid);
    return resT.trim();
  } catch (err: any) {
    // …
      st: err.status,
    };
    log("ER", "OpenAI", `Fail tid=${tid}:${err.message}`, { e: eD }, mid, tid);
    throw new Error(
      `OpenAI API Call Failed: ${err.message}`
        + (err.code ? ` (Code:${err.code}, Status:${err.status})` : (err.status ? ` (Status:${err.status})` : "")),
    );
  }
}
// …
  const filledUserPrompt = Utils.fillPromptTemplate(userPrompt, params);
  const rawOpenAIResponse = await callOpenAI(systemPrompt, filledUserPrompt, mid, tid, logFn);
  if (!rawOpenAIResponse) {
    logFn("WN", agentConfig.name, `OpenAI call returned no content, tid=${tid}.`, {}, mid, tid);
    return { mid, cid: tid, p: {} as TOD, e: `${agentConfig.name} Error: AI returned no content.` };
  }
  let outputData: TOD;
  try {
    outputData = agentConfig.outputParser(rawOpenAIResponse);
  } catch (parseError: any) {
    // …
      agentConfig.name,
      `Output parsing failed, tid=${tid}. M: ${parseError.message}`,
      { rawResponsePreview: rawOpenAIResponse.slice(0, 500) },
      mid,
      tid,
} from "https://esm.town/v/join/mech2/src/coreTypes.ts";import { Utils } from "https://esm.town/v/join/mech2/src/utils.ts";import { OpenAI } from "https://esm.town/v/std/openai";export class LogAgent {}export async function callOpenAI( sysP: string, userP: string, log: LogFn,): Promise<string | null> { log("DB", "OpenAI", `Call tid=${tid}`, { sL: sysP.length, uL: userP.length }, mid, tid); try { const oai = new OpenAI(); const comp = await oai.chat.completions.create({ model: "gpt-4o-mini", const usg = comp.usage; if (!resT) { log("WN", "OpenAI", `No text tid=${tid}.`, { usg, fin: comp.choices[0]?.finish_reason }, mid, tid); return null; } log("IN", "OpenAI", `OK tid=${tid}`, { rL: resT.length, usg, fin: comp.choices[0]?.finish_reason }, mid, tid); return resT.trim(); } catch (err: any) { st: err.status, }; log("ER", "OpenAI", `Fail tid=${tid}:${err.message}`, { e: eD }, mid, tid); throw new Error( `OpenAI API Call Failed: ${err.message}` + (err.code ? ` (Code:${err.code}, Status:${err.status})` : (err.status ? ` (Status:${err.status})` : "")), ); } const filledUserPrompt = Utils.fillPromptTemplate(userPrompt, params); const rawOpenAIResponse = await callOpenAI(systemPrompt, filledUserPrompt, mid, tid, logFn); if (!rawOpenAIResponse) { logFn("WN", agentConfig.name, `OpenAI no content tid=${tid}.`, {}, mid, tid); return { mid, cid: tid, p: {} as TOD, e: `${agentConfig.name} Err:AI no content.` }; } let outputData: TOD; try { outputData = agentConfig.outputParser(rawOpenAIResponse); } catch (parseError: any) { logFn( agentConfig.name, `ParseFail tid=${tid}. M:${parseError.message}`, { rRPrev: rawOpenAIResponse.slice(0, 500) }, mid, tid,
export async function emailValHandler(receivedEmail) {
  const llmApiUrl = "https://api.anthropic.com/v1/messages";
  const apiKey = Deno.env.get("ANTHROPIC_API_KEY"); // replace this entire line with your Anthropic API key as a string, e.g., "sk-123...", or use an environment variable: https://docs.val.town/reference/environment-variables/
  const model = "claude-opus-4-20250514";
  // …
  const prompt = generatePrompt(receivedEmail, pdfTexts, emailText);

  // step 4: send prompt to the AI API
  const aiResponse = await sendRequestToAI(prompt, llmApiUrl, apiKey, model);

  // log the AI response
  console.log("ai response:", aiResponse);

  // step 5: send the response back via email
  // …
}

// helper function to generate a prompt for the AI
function generatePrompt(email, pdfTexts, emailText) {
  // extract the first name from the 'from' field if it exists
  // …
}

// helper function to send a request to the AI API
async function sendRequestToAI(prompt, llmApiUrl, apiKey, model) {
  try {
    // prepare the messages payload
    const messages = [
      {
        // …
      },
    ];
    // …

    // send the request
    const response = await fetch(llmApiUrl, {
      method: "POST",
      // …
    });

    const data = await response.json();
    console.log(data);
    // Anthropic's Messages API returns a `content` array rather than OpenAI-style `choices`
    return data.content?.[0]?.text || "no response from the model.";
  } catch (err) {
    console.error("error in sendRequestToAI:", err);
This script allows you to:
- Send emails to OpenAI; the email text is treated as the prompt.
- Parse PDF attachments and include their contents in the AI's analysis.
- Get the response directly in your inbox.

Setup:
1. Copy this Val and save it as an Email Val (choose the Val type in the top-right corner of the editor).
2. Add your OpenAI API key to line 8 (or use an environment variable: https://docs.val.town/reference/environment-variables/).
3. Copy the email address of the Val (click the 3 dots in the top-right > Copy > Copy email address).
// main controller function
export async function emailValHandler(receivedEmail) {
  const openaiUrl = "https://api.openai.com/v1/chat/completions";
  const apiKey = Deno.env.get("OPENAI_KEY"); // replace this entire line with your OpenAI API key as a string, e.g., "sk-123...", or use an environment variable: https://docs.val.town/reference/environment-variables/
  const model = "gpt-4o-mini";
  if (!apiKey) {
    throw new Error(
      "OPENAI_KEY environment variable is not set. Please set it or replace this line with your API key.",
    );
  }
  // …
  const prompt = generatePrompt(receivedEmail, pdfTexts, emailText);

  // step 4: send prompt to openai
  const openaiResponse = await sendRequestToOpenAI(prompt, openaiUrl, apiKey, model);

  // log the openai response
  console.log("openai response:", openaiResponse);

  // step 5: send the response back via email
  await sendResponseByEmail(receivedEmail.from, openaiResponse);
  console.log("response email sent.");
}

// helper function to generate a prompt for openai
function generatePrompt(email, pdfTexts, emailText) {
  // extract the first name from the 'from' field if it exists
  // …
}

// helper function to send a request to openai
async function sendRequestToOpenAI(prompt, openaiUrl, apiKey, model) {
  try {
    // prepare the openai messages payload
    const messages = [
      {
        // …
      },
    ];
    // …

    // send the request to openai
    const response = await fetch(openaiUrl, {
      method: "POST",
      // …
      body: JSON.stringify(body),
    });

    // parse the response
    const data = await response.json();
    return data.choices[0]?.message?.content || "no response from openai.";
  } catch (err) {
    console.error("error in sendRequestToOpenAI:", err);
    return "error processing your request.";
  }
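Both email handlers above elide how the chat-completions request body is assembled; a condensed sketch of that step for the OpenAI endpoint, with an assumed system prompt, looks like this:

```ts
// Sketch only: the system prompt wording here is an assumption, not the original val's text.
async function sendRequestToOpenAI(prompt: string, openaiUrl: string, apiKey: string, model: string) {
  const body = {
    model,
    messages: [
      { role: "system", content: "You are a helpful assistant replying to an email." },
      { role: "user", content: prompt },
    ],
  };

  const response = await fetch(openaiUrl, {
    method: "POST",
    headers: {
      "Content-Type": "application/json",
      "Authorization": `Bearer ${apiKey}`,
    },
    body: JSON.stringify(body),
  });

  const data = await response.json();
  return data.choices?.[0]?.message?.content ?? "no response from openai.";
}
```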
Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table; a sketch of that pattern follows the OpenAI example below.

### OpenAI

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
});
console.log(completion.choices[0].message.content);
```
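A minimal sketch of the fresh-table pattern from the note above, assuming the `std/sqlite` client and a hypothetical `todos` table:

```ts
import { sqlite } from "https://esm.town/v/std/sqlite";

// The old schema lived in `todos`; rather than ALTERing it, create `todos_2` with the new shape.
await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS todos_2 (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    title TEXT NOT NULL,
    done INTEGER DEFAULT 0,
    due_date TEXT -- newly added column
  )
`);

// Read and write against the new table name from now on.
await sqlite.execute({
  sql: "INSERT INTO todos_2 (title) VALUES (?)",
  args: ["ship the val"],
});
```

Existing rows can be copied over with an `INSERT INTO todos_2 ... SELECT ... FROM todos` if needed, since SQLite's `ALTER TABLE` support is limited.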
reconsumeralization
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";
/**
* Practical Implementation of Collective Content Intelligence
* Bridging advanced AI with collaborative content creation
*/
exp
kwhinnery_openai
lost1991
import { OpenAI } from "https://esm.town/v/std/openai";
export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",