Search
Code 3,657
```ts
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";
// @ts-ignore
import { blob } from "https://esm.town/v/std/blob?v=11";

const MAX_BREADTH_PER_NODE = 10; // Safety cap for AI verbosity
const MAX_CONCURRENCY = 8; // Max parallel AI calls

const openai = new OpenAI();
const limit = pLimit(MAX_CONCURRENCY);

async function callAI(fact: string, prompt: string): Promise<Prerequisite[]> {
  try {
    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: [
        // …snippet truncated
```

Note: When changing a SQLite table's schema, change the table's name (e.g., add `_2` or `_3`) to create a fresh table.

### OpenAI

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
    // …snippet truncated
```
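The `### OpenAI` excerpt above is cut off mid-call. A minimal complete version of the same pattern might look like the sketch below; the `gpt-4o-mini` model name and `max_tokens` value are assumptions, not part of the original snippet.

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini", // assumed model name
  max_tokens: 30,
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
});

// The reply text lives on the first choice's message.
console.log(completion.choices[0].message.content);
```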
```ts
function widgetMeta(invoking?: string, invoked?: string) {
  return {
    "openai/outputTemplate": WIDGET_URI,
    "openai/toolInvocation/invoking": invoking,
    "openai/toolInvocation/invoked": invoked,
    "openai/widgetAccessible": true,
    "openai/resultCanProduceWidget": true,
  } as const;
}
```

```ts
          _meta: {
            ...widgetMeta(),
            "openai/widgetCSP": {
              connect_domains: [baseUrl, "https://esm.sh", "https://cdn.jsdelivr.net"],
              resource_domains: [baseUrl, "https://esm.sh", "https://cdn.jsdelivr.net"],
              // …snippet truncated
```

```ts
  _meta: widgetMeta("Loading messages...", "Messages loaded"),
  handler: async (args, ctx) => {
    const subject = ctx.request.params._meta?.["openai/subject"];
    if (!subject) {
      throw new Error("Missing openai/subject in request metadata");
    }
```

```ts
  _meta: widgetMeta("Loading message...", "Message loaded"),
  handler: async ({ id }, ctx) => {
    const subject = ctx.request.params._meta?.["openai/subject"];
    if (!subject) {
      throw new Error("Missing openai/subject in request metadata");
    }
    }
```

```ts
    const subject = ctx.request.params._meta?.["openai/subject"];
    if (!subject) {
      throw new Error("Missing openai/subject in request metadata");
    }
```

- **Backend**: Hono + MCP Lite + Drizzle ORM + SQLite
- **Widget**: React 19 + TanStack Router + OpenAI App SDK

## Quick Start

## Message Scoping

Messages are automatically scoped using the `openai/subject` field that ChatGPT includes in request metadata. This provides authless data isolation: each subject gets its own message board.

The scoping happens in tool handlers:

```typescript
const subject = ctx.request.params._meta?.["openai/subject"];
const messages = await getMessages(subject);
```

The exact semantics of `openai/subject` are determined by ChatGPT.

## Where do I go from here?

```ts
type UnknownObject = Record<string, unknown>;

export interface OpenAiGlobals<
  ToolInput extends UnknownObject = UnknownObject,
  ToolOutput extends UnknownObject = UnknownObject,
  // …snippet truncated
}

export interface OpenAIWidgetAPI<
  ToolInput extends UnknownObject = UnknownObject,
  ToolOutput extends UnknownObject = UnknownObject,
  ToolResponseMetadata extends UnknownObject = UnknownObject,
  WidgetState extends UnknownObject = UnknownObject,
> extends OpenAiGlobals<
    ToolInput,
    ToolOutput,
    // …snippet truncated
}

export const SET_GLOBALS_EVENT_TYPE = "openai:set_globals";

export interface SetGlobalsEvent
  extends CustomEvent<{ globals: Partial<OpenAiGlobals> }> {
  type: typeof SET_GLOBALS_EVENT_TYPE;
}

declare global {
  interface Window {
    openai: OpenAIWidgetAPI;
  }
}

export function getOpenAI(): OpenAIWidgetAPI | undefined {
  return window.openai;
}
```

```ts
import type { MessageListOutput } from "../../../shared/types.ts";
import { useTheme, useToolOutput } from "../hooks.ts";
import { getOpenAI } from "../openai-types.ts";

export function MessageListWidget() {
  async function handleAddMessage() {
    await getOpenAI()?.sendFollowUpMessage({
      prompt: "Add a new message",
    });
```

```ts
import type { MessageDetailOutput } from "../../../shared/types.ts";
import { useTheme, useToolOutput } from "../hooks.ts";
import { getOpenAI } from "../openai-types.ts";

export function MessageDetailWidget() {
  async function handleBackToList() {
    await getOpenAI()?.sendFollowUpMessage({
      prompt: "Show me all messages",
    });
```

```ts
/** @jsxImportSource https://esm.sh/react@19 */
import { useCallback, useEffect, useState, useSyncExternalStore } from "https://esm.sh/react@19";

import type { OpenAiGlobals, SetGlobalsEvent } from "./openai-types.ts";
import { getOpenAI, SET_GLOBALS_EVENT_TYPE } from "./openai-types.ts";

type UnknownObject = Record<string, unknown>;

export function useOpenAiGlobal<K extends keyof OpenAiGlobals>(
  key: K,
): OpenAiGlobals[K] {
  return useSyncExternalStore(
    (onChange) => {
      // …snippet truncated
      };
    },
    () => window.openai[key],
  );
}

export function useToolOutput<T = UnknownObject>(): T | null {
  return useOpenAiGlobal("toolOutput") as T | null;
}

export function useTheme(): "light" | "dark" {
  return useOpenAiGlobal("theme");
}
```

```ts
  defaultState: T | (() => T),
): readonly [T, (state: React.SetStateAction<T>) => void] {
  const widgetStateFromWindow = useOpenAiGlobal("widgetState") as T | null;
  const [widgetState, _setWidgetState] = useState<T>(() => {
    // …snippet truncated
            : stateOrUpdater;
        getOpenAI()?.setWidgetState(newState);
        return newState;
      });
```
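The subscribe callback in `useOpenAiGlobal` is cut off in the hooks excerpt above. Based on `SET_GLOBALS_EVENT_TYPE` and `SetGlobalsEvent` from the `openai-types.ts` excerpt, one plausible reconstruction (an assumption, not the original code) is:

```ts
import { useSyncExternalStore } from "https://esm.sh/react@19";

import type { OpenAiGlobals, SetGlobalsEvent } from "./openai-types.ts";
import { SET_GLOBALS_EVENT_TYPE } from "./openai-types.ts";

export function useOpenAiGlobal<K extends keyof OpenAiGlobals>(
  key: K,
): OpenAiGlobals[K] {
  return useSyncExternalStore(
    (onChange) => {
      // Re-render subscribers whenever the host pushes new globals that include this key.
      const handler = (event: Event) => {
        const { globals } = (event as SetGlobalsEvent).detail;
        if (key in globals) onChange();
      };
      window.addEventListener(SET_GLOBALS_EVENT_TYPE, handler);
      return () => {
        window.removeEventListener(SET_GLOBALS_EVENT_TYPE, handler);
      };
    },
    () => window.openai[key],
  );
}
```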
```ts
// BACKEND API VAL
import { Hono } from "npm:hono@4.4.12";
import { OpenAI } from "https://esm.town/v/std/openai";
import type { Context } from "npm:hono@4.4.12";
import { streamText } from "npm:hono@4.4.12/streaming";

// --- BACKEND SERVICES ---
const services = {
  async callOpenAI(
    systemPrompt: string,
    userContent: string | object,
    // …snippet truncated
      c = null,
    } = options;
    const openai = new OpenAI();
    const messages: any[] = [{ role: "system", content: systemPrompt }, {
      role: "user",
      // …snippet truncated

    if (isJson) requestPayload.response_format = { type: "json_object" };
    try {
      const completion = await openai.chat.completions.create(requestPayload);
      if (stream && c) {
        return streamText(c, async (s) => {
          // …snippet truncated
      return completion;
    } catch (e) {
      console.error(`Error calling OpenAI: ${e.message}`);
      throw new Error("AI service failed.");
    }
  }

  const userContent = `Company Context: ${JSON.stringify(company_context)}`;
  return services.callOpenAI(config.prompts.INDUSTRY_GENERATOR, userContent, {
    c,
    isJson: true,

    },
  };
  return services.callOpenAI(config.prompts.DYNAMIC_LIST_GENERATOR, payload, {
    c,
    isJson: true,

  };
  // We call OpenAI, expecting JSON
  return services.callOpenAI(config.prompts.TASK_DEFINER, userContent, {
    c,
    isJson: true,

  // in case the userPrompt logic diverges more significantly later.
  if (category === "prompt_building") {
    return services.callOpenAI(systemPrompt, userPrompt, { c, stream: true });
  } else {
    return services.callOpenAI(systemPrompt, userPrompt, { c, stream: true });
  }
});

      briefing,
    );
    const criteriaCompletion = await services.callOpenAI(
      "You are a helpful assistant following strict output rules.",
      criteriaPrompt,

      raw_output: first_draft,
    };
    return services.callOpenAI(config.prompts.EVALUATOR_AGENT, userContent, {
      c,
      isJson: true,

## QA Critique
${critique}`;
  return services.callOpenAI(config.prompts.REFINER_AGENT, userContent, {
    c,
    stream: true,
```
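The `services.callOpenAI` helper above is heavily truncated. Below is a minimal sketch of how such a helper could be completed, assuming the `std/openai` wrapper accepts the same `stream` and `response_format` options as the official OpenAI SDK; the option names `c`, `isJson`, and `stream` come from the excerpt, while the model name and the rest of the body are assumptions.

```ts
import type { Context } from "npm:hono@4.4.12";
import { streamText } from "npm:hono@4.4.12/streaming";
import { OpenAI } from "https://esm.town/v/std/openai";

interface CallOptions {
  c?: Context | null; // Hono context, required when streaming
  isJson?: boolean;   // ask the model for a JSON object
  stream?: boolean;   // stream tokens back through Hono
}

async function callOpenAI(
  systemPrompt: string,
  userContent: string | object,
  { c = null, isJson = false, stream = false }: CallOptions = {},
) {
  const openai = new OpenAI();
  const messages = [
    { role: "system" as const, content: systemPrompt },
    {
      role: "user" as const,
      content: typeof userContent === "string" ? userContent : JSON.stringify(userContent),
    },
  ];

  if (stream && c) {
    // Stream tokens to the client as they arrive from the model.
    const completionStream = await openai.chat.completions.create({
      model: "gpt-4o", // assumed model name
      messages,
      stream: true,
    });
    return streamText(c, async (s) => {
      for await (const chunk of completionStream) {
        await s.write(chunk.choices[0]?.delta?.content ?? "");
      }
    });
  }

  // Non-streaming path; optionally force a JSON object response.
  return await openai.chat.completions.create({
    model: "gpt-4o", // assumed model name
    messages,
    ...(isJson ? { response_format: { type: "json_object" } } : {}),
  });
}
```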
reconsumeralization

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

/**
 * Practical Implementation of Collective Content Intelligence
 * Bridging advanced AI with collaborative content creation
 */
exp // …snippet truncated mid-token
```
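The snippet above only shows the `std/openai` and `stevekrouse/sqlite` imports before it is cut off. As a rough illustration of how those two libraries combine, here is a hedged sketch; the table name, schema, model, and prompt are all assumptions, not taken from the original val.

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

// Hypothetical table; rename it (e.g. append _2) if the schema ever changes.
await sqlite.execute(`CREATE TABLE IF NOT EXISTS content_ideas_1 (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  idea TEXT NOT NULL
)`);

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini", // assumed model name
  messages: [
    { role: "user", content: "Suggest one blog post idea about collaborative content creation." },
  ],
});

// Persist the model's suggestion for later collaborative review.
await sqlite.execute({
  sql: "INSERT INTO content_ideas_1 (idea) VALUES (?)",
  args: [completion.choices[0].message.content ?? ""],
});
```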
kwhinnery_openai
lost1991
```ts
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function (req: Request): Promise<Response> {
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        // …snippet truncated
```
OpenAI
OpenAI Codex
= $("p:nth-of-type(2)").first().text(); console.log(intro); Logs OpenAI is an American artificial intelligence (AI) research organization consisting of the non-profit OpenAI, Inc.[5] registered in Delaware and its for-profit subsidiary OpenAI Global, LLC.[6] One
await browser.close(); console.log(intro); Logs "OpenAI is an American artificial intelligence (AI) research laboratory consisting of the non-profit OpenAI Incorporated and its for-profit subsidiary corporation OpenAI Limited Partnership. OpenAI conducts AI
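The two results above scrape the intro paragraph of Wikipedia's OpenAI article, one with a jQuery-style selector and one with a headless browser. A hedged sketch of the selector-based approach, assuming cheerio from npm (the URL and selector mirror the excerpts; none of this is the original code):

```ts
import { load } from "npm:cheerio";

// Fetch the page and read the second paragraph, which holds the article intro.
const html = await (await fetch("https://en.wikipedia.org/wiki/OpenAI")).text();
const $ = load(html);

const intro = $("p:nth-of-type(2)").first().text();
console.log(intro);
```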
SQLite: store and retrieve structured data. Blob Storage: store and retrieve any data. OpenAI: use the OpenAI API. Email: send emails. API and SDK.
Val Town + Claude (Web/Desktop), Val Town + ChatGPT (Web/Desktop), Val Town + OpenAI Codex, Val Town + Cursor, Val Town + GitHub Copilot, Warp.
…just send it to a val. AI enrichment, browser automation: std/openai (prompt an OpenAI model), Clay (enrich user data with Clay), Browserbase (search the web with …).
Popular tools: Val Town CLI + Claude Code, Val Town CLI + OpenAI Codex, Val Town CLI + Cursor, Val Town CLI + GitHub Copilot.
Receiving an attachment and then sending it along. Receiving an attachment, sending it to OpenAI, and then emailing it. Headers: you can set custom headers in …
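One docs result above describes receiving an email, sending it to OpenAI, and emailing the result back. A hedged sketch of that flow with `std/email` and `std/openai`, summarizing the email body rather than an attachment; the handler's input shape and the model name are assumptions.

```ts
import { email } from "https://esm.town/v/std/email";
import { OpenAI } from "https://esm.town/v/std/openai";

// Email handler val: the incoming message fields used here (subject, text)
// are assumed; attachment handling is omitted for brevity.
export default async function (incoming: { subject?: string; text?: string }) {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini", // assumed model name
    messages: [
      { role: "system", content: "Summarize the following email in two sentences." },
      { role: "user", content: incoming.text ?? "" },
    ],
  });

  // std/email sends mail to the val's owner.
  await email({
    subject: `Summary: ${incoming.subject ?? "(no subject)"}`,
    text: completion.choices[0].message.content ?? "",
  });
}
```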
} from "https://esm.town/v/patrickjm/gpt3?v=4"; export let librarySecret = gpt3({ prompt: "what is the meaning of life?", openAiKey: Deno.env.get("openai"), }); Import security. If you’re importing someone else’s code, read the code first