Search

3,263 results found for "openai" (1,917 ms)

Code (3,168)

export default async function server(request: Request): Promise<Response> {
const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
const { OpenAI } = await import("https://esm.town/v/std/openai");
const KEY = "MyProject";
const SCHEMA_VERSION = 3;
`);
const openai = new OpenAI();
if (request.method === "POST") {
if (request.url.includes("/ai-suggestion")) {
const completion = await openai.chat.completions.create({
messages: [
{
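Many of the results follow the shape of the snippet above: a Val Town HTTP handler that lazily imports std/openai (and sometimes stevekrouse/sqlite) and routes a POST to a chat completion. A minimal sketch of that pattern, assuming a hypothetical /ai-suggestion route, request body, model, and prompt (none of which are taken verbatim from the original val):

```ts
// Minimal sketch, not the original val: route POST /ai-suggestion to a chat
// completion and return the suggestion as JSON.
export default async function server(request: Request): Promise<Response> {
  const { OpenAI } = await import("https://esm.town/v/std/openai");
  const openai = new OpenAI(); // std/openai handles authentication for the val

  if (request.method === "POST" && new URL(request.url).pathname === "/ai-suggestion") {
    const { text } = await request.json(); // assumed request body shape
    const completion = await openai.chat.completions.create({
      model: "gpt-4o-mini", // model choice is an assumption
      messages: [
        { role: "system", content: "Suggest one concrete improvement to the user's text." },
        { role: "user", content: text },
      ],
    });
    return Response.json({ suggestion: completion.choices[0].message.content });
  }
  return new Response("Not found", { status: 404 });
}
```

The std/openai wrapper supplies credentials from the Val Town environment, which is why no API key is passed to the constructor.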
import OpenAI from "npm:openai";
const openai = new OpenAI();
export default async (req) => {
// Create the stream with the signal
const stream = await openai.chat.completions.create({
model: "gpt-3.5-turbo",
messages: [{ role: "user", content: "Tell me a story" }],
// Handle connection closed event
req.signal.addEventListener("abort", () => {
console.log("Client disconnected, aborting OpenAI request");
controller.abort();
writer.close().catch(err => {
import OpenAI from "npm:openai";
const openai = new OpenAI();
export default async (req) => {
try {
const stream = await openai.chat.completions.create({
model: "gpt-3.5-turbo",
messages: [{
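The two streaming results above share one technique: create the completion with stream: true, forward chunks to the client, and abort the upstream OpenAI request when the client disconnects (req.signal) or an error is thrown. A consolidated sketch, assuming the TransformStream plumbing and plain-text response; only the model, the user message, and the abort/try-catch structure come from the snippets:

```ts
// Sketch: stream a completion to the client and cancel the upstream OpenAI
// request if the client disconnects.
import OpenAI from "npm:openai";

const openai = new OpenAI();

export default async (req: Request): Promise<Response> => {
  const controller = new AbortController();
  // Create the stream with the signal so controller.abort() propagates upstream.
  const stream = await openai.chat.completions.create(
    {
      model: "gpt-3.5-turbo",
      messages: [{ role: "user", content: "Tell me a story" }],
      stream: true,
    },
    { signal: controller.signal },
  );

  const { readable, writable } = new TransformStream();
  const writer = writable.getWriter();
  const encoder = new TextEncoder();

  // Handle connection closed event: stop the OpenAI stream and close our writer.
  req.signal.addEventListener("abort", () => {
    console.log("Client disconnected, aborting OpenAI request");
    controller.abort();
    writer.close().catch(() => {});
  });

  (async () => {
    try {
      for await (const chunk of stream) {
        const delta = chunk.choices[0]?.delta?.content ?? "";
        if (delta) await writer.write(encoder.encode(delta));
      }
    } catch (_err) {
      // An abort error lands here once controller.abort() fires.
    } finally {
      writer.close().catch(() => {});
    }
  })();

  return new Response(readable, { headers: { "Content-Type": "text/plain" } });
};
```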
Frontend: React 18, TailwindCSS
APIs: Open-Meteo, OpenAI GPT-4o
Hosting: ESM-based module imports
// Fetch AI-powered weather insight
const { OpenAI } = await import("https://esm.town/v/std/openai");
const openai = new OpenAI();
const aiResponse = await openai.chat.completions.create({
messages: [
{
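The weather result combines a free Open-Meteo forecast call with a GPT-4o summary. A sketch of that flow, assuming the Open-Meteo query parameters, the prompt wording, and the helper name getWeatherInsight, none of which appear in the snippet:

```ts
// Sketch: fetch an Open-Meteo forecast, then ask the model for a one-line insight.
const { OpenAI } = await import("https://esm.town/v/std/openai");

async function getWeatherInsight(lat: number, lon: number): Promise<string> {
  // Open-Meteo needs no API key; daily temperature bounds are enough for a summary.
  const res = await fetch(
    `https://api.open-meteo.com/v1/forecast?latitude=${lat}&longitude=${lon}`
      + `&daily=temperature_2m_max,temperature_2m_min&timezone=auto`,
  );
  const forecast = await res.json();

  const openai = new OpenAI();
  const aiResponse = await openai.chat.completions.create({
    model: "gpt-4o", // the result lists "OpenAI GPT-4o" in its stack
    messages: [
      { role: "system", content: "Summarize this weather data in one friendly sentence." },
      { role: "user", content: JSON.stringify(forecast.daily) },
    ],
  });
  return aiResponse.choices[0].message.content ?? "";
}
```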
export default async function server(request: Request): Promise<Response> {
const { OpenAI } = await import("https://esm.town/v/std/openai");
// Enhanced server-side logging
}
const openai = new OpenAI();
const url = new URL(request.url);
// Call AgentA to create the tool definition
const agentACompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
`;
const agentBCompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
});
// Use AgentB (a separate OpenAI instance) to process the tool request
const agentBPrompt = `
You are AgentB, a specialized tool agent designed to process specific information requests
`;
const agentBCompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
});
// Fallback response if OpenAI fails
let agentAResponse: AgentResponse = {
actionType: "direct_response",
`;
const agentACompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
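This result sketches a two-step agent flow: AgentA decides in JSON mode whether to answer directly or emit a tool request, AgentB (a second JSON-mode call) fulfils that request, and a prebuilt fallback AgentResponse is used if OpenAI fails. A rough reconstruction, assuming the AgentResponse shape, both prompts, and the runAgents wrapper:

```ts
// Sketch of the AgentA -> AgentB flow; shapes and prompts are assumptions.
const { OpenAI } = await import("https://esm.town/v/std/openai");
const openai = new OpenAI();

interface AgentResponse {
  actionType: "direct_response" | "tool_request";
  content: string;
}

async function runAgents(userMessage: string): Promise<AgentResponse> {
  // Fallback response if OpenAI fails or returns unparseable JSON.
  let agentAResponse: AgentResponse = {
    actionType: "direct_response",
    content: "Sorry, I could not process that request.",
  };

  try {
    // Call AgentA to decide on a direct answer or a tool request.
    const agentACompletion = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      response_format: { type: "json_object" },
      messages: [
        { role: "system", content: "You are AgentA. Reply with JSON: {actionType, content}." },
        { role: "user", content: userMessage },
      ],
    });
    agentAResponse = JSON.parse(agentACompletion.choices[0].message.content ?? "{}");
  } catch {
    return agentAResponse; // keep the fallback
  }

  if (agentAResponse.actionType !== "tool_request") return agentAResponse;

  // Use AgentB (a separate JSON-mode call) to process the tool request.
  const agentBCompletion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    response_format: { type: "json_object" },
    messages: [
      { role: "system", content: "You are AgentB, a specialized tool agent. Reply with JSON: {actionType, content}." },
      { role: "user", content: agentAResponse.content },
    ],
  });
  return JSON.parse(agentBCompletion.choices[0].message.content ?? "{}");
}
```

JSON mode (response_format: { type: "json_object" }) requires the word "JSON" to appear in the prompt, which is why both system messages spell out the expected shape.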
web/base/main.tsx
8 matches
export default async function server(request: Request): Promise<Response> {
const { OpenAI } = await import("https://esm.town/v/std/openai");
const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
};
const openai = new OpenAI();
const KEY = extractKey(new URL(import.meta.url));
const SCHEMA_VERSION = 3; // Increment schema version for tool support
// Call AgentA to create the tool definition
const agentACompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
`;
const agentBCompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
}
// Use AgentB (a separate OpenAI instance) to process the tool request
const agentBPrompt = `
You are AgentB, a specialized tool agent designed to process specific information requests
`;
const agentBCompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
});
// Fallback response if OpenAI fails
let agentAResponse: AgentResponse = {
actionType: "direct_response",
`;
const agentACompletion = await openai.chat.completions.create({
model: "gpt-4o-mini",
response_format: { type: "json_object" },
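The web/base/main.tsx matches also show the storage side: a key derived from extractKey(new URL(import.meta.url)) and a SCHEMA_VERSION that is incremented when the stored shape changes (here bumped to 3 for tool support). A sketch of that convention, assuming the table name format, the column layout, and the extractKey implementation; only KEY, SCHEMA_VERSION, and the sqlite import appear in the result:

```ts
// Sketch: version the table name so bumping SCHEMA_VERSION starts a fresh
// table instead of migrating the old one in place.
const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");

const KEY = extractKey(new URL(import.meta.url)); // e.g. the val's name
const SCHEMA_VERSION = 3; // increment when the shape of stored rows changes
const TABLE = `${KEY}_messages_${SCHEMA_VERSION}`;

await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS ${TABLE} (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    role TEXT NOT NULL,
    content TEXT NOT NULL,
    created_at TEXT DEFAULT CURRENT_TIMESTAMP
  )
`);

// Hypothetical helper: derive a stable key from the val's URL.
function extractKey(url: URL): string {
  return url.pathname.split("/").filter(Boolean).join("_").replace(/\W/g, "_");
}
```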
export default async function server(request: Request): Promise<Response> {
const { OpenAI } = await import("https://esm.town/v/std/openai");
const { sqlite } = await import("https://esm.town/v/stevekrouse/sqlite");
const openai = new OpenAI();
const url = new URL(request.url);
const path = url.pathname.split("/").filter(Boolean);
`;
const completion = await openai.chat.completions.create({
model: "gpt-4o-mini",
messages: [{ role: "user", content: componentPrompt }],
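The last result feeds a componentPrompt to gpt-4o-mini, presumably to generate UI code on demand. A small sketch under that assumption; the prompt text, the generateComponent helper, and the route shown in the trailing comment are all illustrative:

```ts
// Sketch: ask gpt-4o-mini for a self-contained component and return the code.
const { OpenAI } = await import("https://esm.town/v/std/openai");

async function generateComponent(description: string): Promise<string> {
  const openai = new OpenAI();
  const componentPrompt =
    `Write a single self-contained React component (JSX, TailwindCSS classes) that does the following:\n`
    + `${description}\nReturn only the code.`;

  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [{ role: "user", content: componentPrompt }],
  });
  return completion.choices[0].message.content ?? "";
}

// Example usage inside the handler shown above (route name is hypothetical):
// if (path[0] === "generate" && request.method === "POST") {
//   const { description } = await request.json();
//   return Response.json({ code: await generateComponent(description) });
// }
```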