Code search results (3,898 matches)
```ts
// Model configuration
const DEFAULT_MODEL = "openai/gpt-oss-20b";
const DEFAULT_MAX_CONTEXT_PAGES = 7; // number of top search results used as context
const DEFAULT_TEMPERATURE = 0.3;
```

_2 or _3) to create a fresh table.

### OpenAI

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
```

```ts
import { slack } from "./slack.ts";
import { Hono } from "npm:hono";
import { icp } from "./openai.ts";

const app = new Hono();
```

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { z } from "npm:zod@3.23.8";
import { zodResponseFormat } from "npm:openai@5.12.2/helpers/zod";

const openai = new OpenAI();
const ICPResult = z.object({

  }];
  const resp = await openai.chat.completions.parse({
    model: "gpt-5-mini",
    messages,
```

```ts
const REALTIME_BASE_URL = "https://api.openai.com/v1/realtime";
const OPENAI_API_KEY = Deno.env.get("OPENAI_API_KEY");
if (!OPENAI_API_KEY) {
  throw new Error("🔴 OpenAI API key not configured");
}

export function makeHeaders(contentType?: string) {
  const obj: Record<string, string> = {
    Authorization: `Bearer ${OPENAI_API_KEY}`,
  };
  if (contentType) obj["Content-Type"] = contentType;
```

```ts
sip.post("/", async (c) => {
  // Verify the webhook.
  const OPENAI_SIGNING_SECRET = Deno.env.get("OPENAI_SIGNING_SECRET");
  if (!OPENAI_SIGNING_SECRET) {
    console.error("🔴 webhook secret not configured");
    return c.text("Internal error", 500);
  }
  const webhook = new Webhook(OPENAI_SIGNING_SECRET);
  const bodyStr = await c.req.text();
  let callId: string | undefined;
```

```md
# hello-realtime

**Hello Realtime** is an OpenAI Realtime app that supports both WebRTC and SIP
(telephone) users. You can access the app via WebRTC at
[hello-realtime.val.run](https://hello-realtime.val.run), or via SIP by calling

server-side websocket interface.

If you remix the app, you'll just need to pop in your own `OPENAI_API_KEY` (from
[platform.openai.com](https://platform.openai.com)), and if you want SIP, the
`OPENAI_SIGNING_SECRET`.

## Architecture

1. **WebRTC Flow**:
   - Browser connects to frontend
   - creates WebRTC offer
   - `/rtc` endpoint handles SDP negotiation with OpenAI
   - observer established to monitor session
2. **SIP Flow**:
```

```html
<meta charset="utf-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>OpenAI Realtime API Voice Agent</title>
<style>
  :root {
```

```ts
const NR_TYPE = "near_field";
const INSTRUCTIONS = `
Greet the user in English and tell them that they're using the OpenAI Realtime
API, powered by the {{model}} model. Give them a very brief summary of the
benefits of the Realtime API based on the headline below, and then ask if they
have any questions.
- higher audio quality
- improved handling of alphanumerics (eg, properly understanding credit card and phone numbers)
- support for the OpenAI Prompts API
- support for MCP-based tools
- auto-truncation to reduce context size
```

```ts
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- AI BEHAVIORAL GUIDELINES ---
/**
 * This prompt instructs the OpenAI model to act as the cognitive core of
 * the autonomous agent described in the user's provided academic paper.
 * It is a novel application, using the AI not just to answer questions,

if (req.method === "POST") {
  try {
    const openai = new OpenAI();
    const body = await req.json();

    ];
    // Call OpenAI to get the agent's next step.
    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: messages,
```
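The structured-output excerpt above (the one importing `zodResponseFormat`) stops before the parsed result is used. A minimal sketch of how that round trip typically completes, assuming a made-up two-field schema, prompt, and `"icp_result"` label rather than the val's real ones:

```ts
// Sketch only: schema fields and prompts are invented for illustration;
// zodResponseFormat and chat.completions.parse come from the openai npm package (v5).
import OpenAI from "npm:openai@5.12.2";
import { z } from "npm:zod@3.23.8";
import { zodResponseFormat } from "npm:openai@5.12.2/helpers/zod";

const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY") });

// Hypothetical ICP (ideal customer profile) verdict schema.
const ICPResult = z.object({
  isMatch: z.boolean(),
  reasoning: z.string(),
});

const resp = await openai.chat.completions.parse({
  model: "gpt-5-mini",
  messages: [
    { role: "system", content: "Decide whether the company fits our ICP." },
    { role: "user", content: "Acme Corp sells billing software to small law firms." },
  ],
  response_format: zodResponseFormat(ICPResult, "icp_result"),
});

// .parsed is typed from the zod schema (or null if the model refused).
console.log(resp.choices[0].message.parsed);
```

The `parse` helper validates the model's JSON against the zod schema before returning, so downstream code works with a typed object instead of a raw string.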
reconsumeralization

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

/**
 * Practical Implementation of Collective Content Intelligence
 * Bridging advanced AI with collaborative content creation
 */
exp
```
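The preview above is cut off at the start of the export. As a rough illustration of how `std/openai` and `stevekrouse/sqlite` are commonly combined in a val (the table name, prompt, and HTTP handler shape below are assumptions, not this val's actual code):

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

// Hypothetical table; the real val's schema is not shown in the preview.
await sqlite.execute(`CREATE TABLE IF NOT EXISTS content_ideas_v1 (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  topic TEXT NOT NULL,
  idea TEXT NOT NULL
)`);

export default async function(req: Request): Promise<Response> {
  const topic = new URL(req.url).searchParams.get("topic") ?? "collaborative editing";

  // Generate a piece of content with the std OpenAI client.
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini", // arbitrary model choice for the sketch
    messages: [{ role: "user", content: `Suggest one content idea about ${topic}.` }],
  });
  const idea = completion.choices[0].message.content ?? "";

  // Persist the generated idea alongside its topic.
  await sqlite.execute({
    sql: "INSERT INTO content_ideas_v1 (topic, idea) VALUES (?, ?)",
    args: [topic, idea],
  });

  return Response.json({ topic, idea });
}
```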
kwhinnery_openai
lost1991
```ts
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
```