Note: When changing a SQLite table's schema, change the table's name (e.g., add `_2` or `_3`) to create a fresh table.

### OpenAI

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
});
```

### Widget tool metadata (OpenAI Apps SDK)

```ts
// Shared _meta helper: marks a tool as widget-backed and points at the
// widget's output template.
function widgetMeta(invoking?: string, invoked?: string) {
  return {
    "openai/outputTemplate": WIDGET_URI,
    "openai/toolInvocation/invoking": invoking,
    "openai/toolInvocation/invoked": invoked,
    "openai/widgetAccessible": true,
    "openai/resultCanProduceWidget": true,
  } as const;
}

// Resource-level metadata, including the widget's Content Security Policy.
_meta: {
  ...widgetMeta(),
  "openai/widgetCSP": {
    connect_domains: [baseUrl, "https://esm.sh", "https://cdn.jsdelivr.net"],
    resource_domains: [baseUrl, "https://esm.sh", "https://cdn.jsdelivr.net"],
  },
},

// Tool handlers read the subject from the request's metadata before doing any work.
_meta: widgetMeta("Loading messages...", "Messages loaded"),
handler: async (args, ctx) => {
  const subject = ctx.request.params._meta?.["openai/subject"];
  if (!subject) {
    throw new Error("Missing openai/subject in request metadata");
  }
  // ...
},

_meta: widgetMeta("Loading message...", "Message loaded"),
handler: async ({ id }, ctx) => {
  const subject = ctx.request.params._meta?.["openai/subject"];
  if (!subject) {
    throw new Error("Missing openai/subject in request metadata");
  }
  // ...
},
```

### window.openai types (openai-types.ts)

```ts
type UnknownObject = Record<string, unknown>;

export interface OpenAiGlobals<
  ToolInput extends UnknownObject = UnknownObject,
  ToolOutput extends UnknownObject = UnknownObject,
> {
  // ... host-provided globals (theme, toolOutput, widgetState, ...)
}

export interface OpenAIWidgetAPI<
  ToolInput extends UnknownObject = UnknownObject,
  ToolOutput extends UnknownObject = UnknownObject,
  ToolResponseMetadata extends UnknownObject = UnknownObject,
  WidgetState extends UnknownObject = UnknownObject,
> extends OpenAiGlobals<ToolInput, ToolOutput> {
  // ... host-provided methods (setWidgetState, sendFollowUpMessage, ...)
}

export const SET_GLOBALS_EVENT_TYPE = "openai:set_globals";

export interface SetGlobalsEvent extends CustomEvent<{ globals: Partial<OpenAiGlobals> }> {
  type: typeof SET_GLOBALS_EVENT_TYPE;
}

declare global {
  interface Window {
    openai: OpenAIWidgetAPI;
  }
}

export function getOpenAI(): OpenAIWidgetAPI | undefined {
  return window.openai;
}
```

### Widget components

```ts
import type { MessageListOutput } from "../../../shared/types.ts";
import { useTheme, useToolOutput } from "../hooks.ts";
import { getOpenAI } from "../openai-types.ts";

export function MessageListWidget() {
  async function handleAddMessage() {
    await getOpenAI()?.sendFollowUpMessage({
      prompt: "Add a new message",
    });
  }
  // ...
}
```

```ts
import type { MessageDetailOutput } from "../../../shared/types.ts";
import { useTheme, useToolOutput } from "../hooks.ts";
import { getOpenAI } from "../openai-types.ts";

export function MessageDetailWidget() {
  async function handleBackToList() {
    await getOpenAI()?.sendFollowUpMessage({
      prompt: "Show me all messages",
    });
  }
  // ...
}
```

### React hooks over window.openai (hooks.ts)

```ts
/** @jsxImportSource https://esm.sh/react@19 */
import { useCallback, useEffect, useState, useSyncExternalStore } from "https://esm.sh/react@19";
import type { OpenAiGlobals, SetGlobalsEvent } from "./openai-types.ts";
import { getOpenAI, SET_GLOBALS_EVENT_TYPE } from "./openai-types.ts";

type UnknownObject = Record<string, unknown>;

// Subscribe to one window.openai global and re-render whenever the host updates it.
export function useOpenAiGlobal<K extends keyof OpenAiGlobals>(
  key: K,
): OpenAiGlobals[K] {
  return useSyncExternalStore(
    (onChange) => {
      // ... add a listener for the host's set-globals event, call onChange on
      //     updates, and return a cleanup function that removes the listener
    },
    () => window.openai[key],
  );
}

export function useToolOutput<T = UnknownObject>(): T | null {
  return useOpenAiGlobal("toolOutput") as T | null;
}

export function useTheme(): "light" | "dark" {
  return useOpenAiGlobal("theme");
}
```
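The subscribe callback inside `useOpenAiGlobal` is elided above. A minimal sketch of what it could look like, assuming the host dispatches `SET_GLOBALS_EVENT_TYPE` (`"openai:set_globals"`) on `window` whenever it updates `window.openai`; the `subscribeToOpenAiGlobals` name is illustrative, not from the source.

```ts
import type { SetGlobalsEvent } from "./openai-types.ts";
import { SET_GLOBALS_EVENT_TYPE } from "./openai-types.ts";

// Illustrative subscribe callback for useSyncExternalStore: trigger a re-read
// of window.openai whenever the host announces new globals.
function subscribeToOpenAiGlobals(onChange: () => void): () => void {
  const handler = (event: Event) => {
    const { globals } = (event as SetGlobalsEvent).detail;
    if (globals) onChange();
  };
  window.addEventListener(SET_GLOBALS_EVENT_TYPE, handler);
  return () => window.removeEventListener(SET_GLOBALS_EVENT_TYPE, handler);
}
```

`useOpenAiGlobal` would pass a function like this as the first argument to `useSyncExternalStore`, with `() => window.openai[key]` as the snapshot reader.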
useOpenAiGlobal("theme");} defaultState: T | (() => T),): readonly [T, (state: React.SetStateAction<T>) => void] { const widgetStateFromWindow = useOpenAiGlobal("widgetState") as T | null; const [widgetState, _setWidgetState] = useState<T>(() => { : stateOrUpdater; getOpenAI()?.setWidgetState(newState); return newState; });// BACKEND API VALimport { Hono } from "npm:hono@4.4.12";import { OpenAI } from "https://esm.town/v/std/openai";import type { Context } from "npm:hono@4.4.12";import { streamText } from "npm:hono@4.4.12/streaming";// --- BACKEND SERVICES ---const services = { async callOpenAI( systemPrompt: string, userContent: string | object, c = null, } = options; const openai = new OpenAI(); const messages: any[] = [{ role: "system", content: systemPrompt }, { role: "user", if (isJson) requestPayload.response_format = { type: "json_object" }; try { const completion = await openai.chat.completions.create(requestPayload); if (stream && c) { return streamText(c, async (s) => { return completion; } catch (e) { console.error(`Error calling OpenAI: ${e.message}`); throw new Error("AI service failed."); } } const userContent = `Company Context: ${JSON.stringify(company_context)}`; return services.callOpenAI(config.prompts.INDUSTRY_GENERATOR, userContent, { c, isJson: true, }, }; return services.callOpenAI(config.prompts.DYNAMIC_LIST_GENERATOR, payload, { c, isJson: true, }; // We call OpenAI, expecting JSON return services.callOpenAI(config.prompts.TASK_DEFINER, userContent, { c, isJson: true, // in case the userPrompt logic diverges more significantly later. if (category === "prompt_building") { return services.callOpenAI(systemPrompt, userPrompt, { c, stream: true }); } else { return services.callOpenAI(systemPrompt, userPrompt, { c, stream: true }); }}); briefing, ); const criteriaCompletion = await services.callOpenAI( "You are a helpful assistant following strict output rules.", criteriaPrompt, raw_output: first_draft, }; return services.callOpenAI(config.prompts.EVALUATOR_AGENT, userContent, { c, isJson: true,## QA Critique${critique}`; return services.callOpenAI(config.prompts.REFINER_AGENT, userContent, { c, stream: true,_2 or _3) to create a fresh table.### OpenAI```tsimport { OpenAI } from "https://esm.town/v/std/openai";const openai = new OpenAI();const completion = await openai.chat.completions.create({ messages: [ { role: "user", content: "Say hello in a creative way" },// OpenAI + Orama Strategy: Semantic search using OpenAI embeddings and Orama// Faster than JigsawStack (~100-200ms vs ~550ms for query embeddings)import { create, insertMultiple, search } from "npm:@orama/orama@latest";// OpenAI embeddings functionexport const generateEmbeddings = async (content: string): Promise<number[] | null> => { const OPENAI_API_KEY = Deno.env.get("OPENAI_API_KEY"); const OPENAI_API_URL = "https://api.openai.com/v1/embeddings"; if (!OPENAI_API_KEY) { console.warn("OPENAI_API_KEY not found - embeddings disabled"); return null; } try { const response = await fetch(OPENAI_API_URL, { method: "POST", headers: { "Authorization": `Bearer ${OPENAI_API_KEY}`, "Content-Type": "application/json", }, if (!response.ok) { const errorText = await response.text().catch(() => response.statusText); throw new Error(`OpenAI API error: ${response.status} ${response.statusText} - ${errorText.substring(0, 200)}`); } } catch (error) { const errorMessage = error instanceof Error ? 
```ts
export const searchStrategy: SearchStrategy = {
  name: "openai-orama",
  description:
    "Semantic search using OpenAI embeddings with Orama vector search (faster than JigsawStack)",
  search: async (query: string, pages: Page[], options: SearchOptions = {}): Promise<SearchResult[]> => {
    const limit = options.limit || 10;
    // ... embed the query, search the Orama index, then map each hit to:
    //   snippet: page
    //     ? generateSnippet(page.content, queryWords, query.toLowerCase())
    //     : hit.document.content.substring(0, 200) + "...",
    //   metadata: {
    //     strategy: "openai-orama",
    //     similarity: hit.score,
    //     ...(enableTiming && { timings }),
    //   },
  },
};
```

### Semantic search: OpenAI embeddings + cosine similarity

```ts
// OpenAI + cosine similarity strategy: direct cosine similarity calculation.
// Fastest for small datasets (<100 pages) - no DB overhead.
import { generateSnippet, cosineSimilarity } from "./utils.ts";

// generateEmbeddings is identical to the Orama strategy above and is omitted here.

export const searchStrategy: SearchStrategy = {
  name: "openai-cosine",
  description:
    "Semantic search using OpenAI embeddings with direct cosine similarity (fastest for small datasets)",
  search: async (query: string, pages: Page[], options: SearchOptions = {}): Promise<SearchResult[]> => {
    const limit = options.limit || 10;
    // ... embed the query, score each page with cosineSimilarity, then map each result to:
    //   snippet: generateSnippet(page.content, queryWords, query.toLowerCase()),
    //   metadata: {
    //     strategy: "openai-cosine",
    //     similarity,
    //     ...(enableTiming && { timings }),
    //   },
  },
};
```
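The `cosineSimilarity` helper imported from `./utils.ts` is not shown in the snippet. A typical implementation looks like the sketch below; this is an illustrative version, not the source's code.

```ts
// Cosine similarity between two embedding vectors: dot(a, b) / (|a| * |b|).
export function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  const denominator = Math.sqrt(normA) * Math.sqrt(normB);
  return denominator === 0 ? 0 : dot / denominator;
}
```

The `openai-cosine` strategy would score each page's embedding against the query embedding with this function, sort descending, and keep the top `limit` hits.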
### Collective content intelligence (excerpt)

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

/**
 * Practical Implementation of Collective Content Intelligence
 * Bridging advanced AI with collaborative content creation
 */
```
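The note at the top of this page recommends renaming a SQLite table (for example, appending `_2`) to get a fresh table whenever its schema changes, rather than altering it in place. A minimal sketch of that pattern with the `sqlite` client imported above; the `messages_2` table and its columns are hypothetical.

```ts
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

// The schema gained a `role` column, so create a fresh table with a bumped
// name instead of migrating the old one.
await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS messages_2 (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    role TEXT NOT NULL,
    content TEXT NOT NULL,
    created_at TEXT DEFAULT CURRENT_TIMESTAMP
  )
`);

// Reads and writes now target the new table name.
await sqlite.execute({
  sql: "INSERT INTO messages_2 (role, content) VALUES (?, ?)",
  args: ["user", "Say hello in a creative way"],
});
```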
### CORS-enabled HTTP endpoint (excerpt)

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  // Answer CORS preflight requests.
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        // ...
      },
    });
  }
  // ...
}
```