Search

3,276 results found for openai (1668ms)

Code (3,181)

// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { Hono } from "npm:hono@4.4.12";

const app = new Hono();

// Endpoint to generate a new world using OpenAI
app.post("/build-world", async (c) => {
  try {
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: [
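The snippet above is cut off by the search view; a minimal sketch of a complete handler in the same style might look like the following. The prompt text, JSON response shape, and error handling are illustrative assumptions, not the val's actual code.

```ts
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
import { Hono } from "npm:hono@4.4.12";

const app = new Hono();

// Hedged sketch: a complete /build-world handler in the style of the fragment above.
app.post("/build-world", async (c) => {
  try {
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: [
        { role: "system", content: "You invent small fictional worlds and reply with JSON." },
        { role: "user", content: "Generate a new world." },
      ],
      response_format: { type: "json_object" },
    });
    const world = JSON.parse(completion.choices[0]?.message?.content ?? "{}");
    return c.json(world);
  } catch (err) {
    return c.json({ error: String(err) }, 500);
  }
});

export default app.fetch;
```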
Configure the following variables in your environment (a minimal sketch of reading them follows this list):
- `AGENT_API_KEY` (a token you choose, used to secure the agent.tsx POST endpoint)
- `OPENAI_API_KEY` (an OpenAI API key)
- `EXA_API_KEY` (optional, but needed if you use the web search tool)
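A minimal sketch of reading these variables inside a val, assuming the Deno runtime that Val Town provides:

```ts
// Reads the environment variables described above (Deno runtime assumed).
const AGENT_API_KEY = Deno.env.get("AGENT_API_KEY");   // your chosen token for the agent.tsx POST endpoint
const OPENAI_API_KEY = Deno.env.get("OPENAI_API_KEY"); // OpenAI API key
const EXA_API_KEY = Deno.env.get("EXA_API_KEY");       // optional; only needed for the web search tool

if (!AGENT_API_KEY || !OPENAI_API_KEY) {
  throw new Error("AGENT_API_KEY and OPENAI_API_KEY must be set");
}
```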
join/goog/main.tsx
1 match
<li>✔ Everything in Momentum, PLUS:</li>
<li>✔ Google Ads: Strategic Scaling ($1,500 Ad Budget).</li>
<li>✔ AI Customer Assistant: Custom Deployment (Val Town & OpenAI).</li>
<li>✔ Google Cloud Platform: Consultation & Credit Maximization.</li>
<li>✔ Dedicated Account Manager: Direct Strategic Partner.</li>
import { z } from "npm:zod@4.0.5";
import { generateText, stepCountIs, tool } from "npm:ai@5.0.15";
import { openai } from "npm:@ai-sdk/openai@2.0.15";
import { Sandbox } from "npm:@e2b/code-interpreter";

const result = await generateText({
  model: openai("gpt-4o"),
  stopWhen: stepCountIs(5),
  tools: {
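The tools object is truncated in the search view. A self-contained sketch of a call like this one, with a single illustrative tool, could look as follows; the getWeather tool, its schema, and the prompt are assumptions.

```ts
import { z } from "npm:zod@4.0.5";
import { generateText, stepCountIs, tool } from "npm:ai@5.0.15";
import { openai } from "npm:@ai-sdk/openai@2.0.15";

// Hedged sketch: a complete generateText call with one illustrative tool.
const result = await generateText({
  model: openai("gpt-4o"),
  stopWhen: stepCountIs(5),
  prompt: "What's the weather like in Berlin?",
  tools: {
    getWeather: tool({
      description: "Get the current weather for a city",
      inputSchema: z.object({ city: z.string() }),
      execute: async ({ city }) => ({ city, forecast: "sunny" }), // stubbed result
    }),
  },
});

console.log(result.text);
```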
join/somm/main.tsx
3 matches
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- AI BEHAVIORAL GUIDELINES ---
if (req.method === "POST") {
  try {
    const openai = new OpenAI();
    const body = await req.json();

    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: messages,
Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.
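A short sketch of the pattern that note describes, assuming Val Town's std sqlite helper; the table and columns are illustrative:

```ts
import { sqlite } from "https://esm.town/v/std/sqlite";

// Instead of ALTER-ing the old table, create a renamed copy with the new schema.
await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS messages_2 (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    content TEXT NOT NULL,
    created_at TEXT DEFAULT CURRENT_TIMESTAMP -- column added in the _2 schema
  )
`);
```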
### OpenAI
```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
});
```
join/sil/main.tsx
3 matches
// @ts-nocheck
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- AI BEHAVIORAL GUIDELINES ---
if (url.pathname === "/generatePrompts" && req.method === "POST") {
  try {
    const openai = new OpenAI();
    const { subject } = await req.json();

    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: [
import { streamText } from "npm:hono@4.4.12/streaming";
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- TYPE DEFINITIONS ---

// --- API HELPER ---
async function callOpenAI(
  systemPrompt: string,
  userContent: string | object,
  isJson = true,
) {
  const openai = new OpenAI();
  const content = typeof userContent === "string"
    ? userContent
    : JSON.stringify(userContent);
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
}

  const { context } = await c.req.json();
  const result = await callOpenAI(LIST_GENERATOR_PROMPT(type, context), "");
  return c.json(result);
});

app.post("/api/prompt/dynamic", async (c) => {
  const body = await c.req.json();
  const result = await callOpenAI(PROMPT_REFINER_PROMPT, body);
  return c.json(result);
});

app.post("/api/inputs", async (c) => {
  const { refined_prompt } = await c.req.json();
  const result = await callOpenAI(FORM_GENERATOR_PROMPT, { refined_prompt });
  return c.json(result);
});

app.post("/api/clarify", async (c) => {
  const { refined_prompt } = await c.req.json();
  const result = await callOpenAI(CLARIFICATION_PROMPT, { refined_prompt });
  return c.json(result);
});
inputs: { ...user_inputs, ...clarifications },
};
const raw_output_v1 = await callOpenAI(
"Execute the provided prompt template using the given inputs. Produce only the raw output.",
v1UserContent,
output: raw_output_v1,
};
const { criteria } = await callOpenAI(
CRITERIA_GENERATOR_PROMPT,
criteriaContext,
criteria: criteria,
};
const evaluation_v1: Evaluation = await callOpenAI(
EVALUATOR_PROMPT,
v1EvalContext,
evaluation: evaluation_v1,
};
const raw_output_v2 = await callOpenAI(
REFINER_PROMPT,
v2RefineContext,
criteria: criteria,
};
const evaluation_v2: Evaluation = await callOpenAI(
EVALUATOR_PROMPT,
v2EvalContext,
import { Hono } from "npm:hono@4.4.12";
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai";
import type { Context } from "npm:hono@4.4.12";
import { streamText } from "npm:hono/streaming";
const services = {
  /**
   * A centralized function for making calls to the OpenAI API.
   * @param systemPrompt - The system prompt to guide the AI.
   * @param userContent - The user's input.
   * @param options - Additional options like model, streaming, and response format.
   * @returns A promise that resolves to the parsed JSON response or an OpenAI stream.
   */
  async callOpenAI(
    systemPrompt: string,
    userContent: string | object,
    } = options;
    const openai = new OpenAI();
    const messages: any[] = [
      { role: "system", content: systemPrompt },
    try {
      const completion = await openai.chat.completions.create(requestPayload);
      if (stream && c) {
        return completion;
    } catch (e) {
      console.error(`Error calling OpenAI: ${e.message}`);
      throw new Error("AI service failed.");
    }
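Pieced together, a minimal non-streaming version of the helper that the JSDoc above describes could look like this; the model name, JSON-mode handling, and defaults are assumptions, and the real services.callOpenAI also supports streaming.

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

// Hedged sketch of a centralized, non-streaming OpenAI helper.
async function callOpenAI(
  systemPrompt: string,
  userContent: string | object,
  isJson = true,
) {
  const openai = new OpenAI();
  const content = typeof userContent === "string" ? userContent : JSON.stringify(userContent);
  const completion = await openai.chat.completions.create({
    model: "gpt-4o", // illustrative default
    messages: [
      { role: "system", content: systemPrompt },
      { role: "user", content },
    ],
    ...(isJson ? { response_format: { type: "json_object" } } : {}),
  });
  const text = completion.choices[0]?.message?.content ?? "";
  return isJson ? JSON.parse(text) : text;
}
```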
...body,
};
return services.callOpenAI(
config.prompts.DYNAMIC_LIST_GENERATOR,
userContent,
JSON.stringify(company_context)
}\n\nOccupation: ${occupation_title}\n\nTask: ${task}`;
return services.callOpenAI(config.prompts.PROMPT_REFINER, userContent, {
c,
isJson: true,
app.post("/api/inputs", async (c: Context) => {
const { refined_prompt } = await c.req.json<InputsBody>();
return services.callOpenAI(config.prompts.INPUT_EXTRACTOR, refined_prompt, {
c,
isJson: true,
app.post("/api/clarify", async (c: Context) => {
const { refined_prompt } = await c.req.json<ClarifyBody>();
return services.callOpenAI(
config.prompts.CLARIFICATION_AGENT,
refined_prompt,
return streamText(c, async (stream) => {
  try {
    const openai = new OpenAI();

    // 2. First call to the AI to see if it wants to use a tool
    const initialResponse = await openai.chat.completions.create({
      model: config.models.major,
      messages,

    // 5. Make the final call with tool results included, and stream the response
    const finalStream = await openai.chat.completions.create({
      model: config.models.major,
      messages,
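The numbered comments describe the usual two-step tool-calling flow. A hedged sketch of the middle steps that the search view omits (execute the requested tools, append their results, then stream the final answer), continuing the fragment above; the runTool helper is an assumption:

```ts
// Steps 3-4 (sketch): run any requested tools and feed results back as "tool" messages.
const toolCalls = initialResponse.choices[0].message.tool_calls ?? [];
if (toolCalls.length > 0) {
  messages.push(initialResponse.choices[0].message);
  for (const call of toolCalls) {
    const result = await runTool(call.function.name, JSON.parse(call.function.arguments)); // runTool is hypothetical
    messages.push({ role: "tool", tool_call_id: call.id, content: JSON.stringify(result) });
  }
}

// Step 5 (sketch): the final call streams the answer with tool results included.
const finalStream = await openai.chat.completions.create({
  model: config.models.major,
  messages,
  stream: true,
});
for await (const chunk of finalStream) {
  await stream.write(chunk.choices[0]?.delta?.content ?? "");
}
```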
JSON.stringify(company_context)
}\n\nTask Briefing:\n${refined_prompt}`;
return services.callOpenAI(
config.prompts.EVALUATION_CRITERIA_GENERATOR,
userContent,
const systemPrompt =
`${config.prompts.EVALUATOR_AGENT}\n\nOutput Language: ${language}`;
return services.callOpenAI(systemPrompt, userContent, { c, stream: true });
});
const systemPrompt =
`${config.prompts.EVALUATOR_AGENT}\n\nOutput Language: ${language}`;
return services.callOpenAI(systemPrompt, userContent, { c, stream: true });
});
const systemPrompt =
`${config.prompts.REFINER_AGENT}\n\nOutput Language: ${language}`;
return services.callOpenAI(systemPrompt, userContent, { c, stream: true });
});