Search

3,768 results found for openai (5627ms)

Code
3,665

/**
 * Builds the shared `_meta` payload that marks a tool result as widget-capable.
 *
 * @param invoking Optional status text shown while the tool call is in flight.
 * @param invoked  Optional status text shown once the tool call completes.
 * @returns A readonly metadata object pointing at WIDGET_URI as the output template.
 */
function widgetMeta(invoking?: string, invoked?: string) {
  const meta = {
    "openai/outputTemplate": WIDGET_URI,
    "openai/toolInvocation/invoking": invoking,
    "openai/toolInvocation/invoked": invoked,
    "openai/widgetAccessible": true,
    "openai/resultCanProduceWidget": true,
  } as const;
  return meta;
}
_meta: {
...widgetMeta(),
"openai/widgetCSP": {
connect_domains: [baseUrl, "https://esm.sh", "https://cdn.jsdelivr.net"],
resource_domains: [baseUrl, "https://esm.sh", "https://cdn.jsdelivr.net"],
_meta: widgetMeta("Loading messages...", "Messages loaded"),
handler: async (args, ctx) => {
const subject = ctx.request.params._meta?.["openai/subject"];
if (!subject) {
throw new Error("Missing openai/subject in request metadata");
}
_meta: widgetMeta("Loading message...", "Message loaded"),
handler: async ({ id }, ctx) => {
const subject = ctx.request.params._meta?.["openai/subject"];
if (!subject) {
throw new Error("Missing openai/subject in request metadata");
}
}
const subject = ctx.request.params._meta?.["openai/subject"];
if (!subject) {
throw new Error("Missing openai/subject in request metadata");
}
- **Backend**: Hono + MCP Lite + Drizzle ORM + SQLite
- **Widget**: React 19 + TanStack Router + OpenAI App SDK
## Quick Start
## Message Scoping
Messages are automatically scoped using the `openai/subject` field that ChatGPT includes in requests.
The scoping happens in tool handlers:
```typescript
const subject = ctx.request.params._meta?.["openai/subject"];
const messages = await getMessages(subject);
```
The exact semantics of `openai/subject` are determined by ChatGPT.
## Where do I go from here?
/** String-keyed object with unknown values; default for the generic tool I/O parameters below. */
type UnknownObject = Record<string, unknown>;
export interface OpenAiGlobals<
ToolInput extends UnknownObject = UnknownObject,
ToolOutput extends UnknownObject = UnknownObject,
}
export interface OpenAIWidgetAPI<
ToolInput extends UnknownObject = UnknownObject,
ToolOutput extends UnknownObject = UnknownObject,
ToolResponseMetadata extends UnknownObject = UnknownObject,
WidgetState extends UnknownObject = UnknownObject,
> extends OpenAiGlobals<
ToolInput,
ToolOutput,
}
/** DOM event type dispatched when the host updates `window.openai` globals. */
export const SET_GLOBALS_EVENT_TYPE = "openai:set_globals";
/**
 * CustomEvent carrying a partial snapshot of updated globals in
 * `event.detail.globals`.
 */
export interface SetGlobalsEvent
extends CustomEvent<{ globals: Partial<OpenAiGlobals> }> {
type: typeof SET_GLOBALS_EVENT_TYPE;
}
// Augments the global Window type with the widget host's API object.
// NOTE(review): presumably injected by the embedding host at runtime —
// getOpenAI() treats it as possibly undefined, so access defensively.
declare global {
interface Window {
openai: OpenAIWidgetAPI;
}
}
/**
 * Safe accessor for the host-provided widget API.
 *
 * @returns The `window.openai` API object, or undefined if it has not been set.
 */
export function getOpenAI(): OpenAIWidgetAPI | undefined {
  const api = window.openai;
  return api;
}
import type { MessageListOutput } from "../../../shared/types.ts";
import { useTheme, useToolOutput } from "../hooks.ts";
import { getOpenAI } from "../openai-types.ts";
export function MessageListWidget() {
async function handleAddMessage() {
await getOpenAI()?.sendFollowUpMessage({
prompt: "Add a new message",
});
import type { MessageDetailOutput } from "../../../shared/types.ts";
import { useTheme, useToolOutput } from "../hooks.ts";
import { getOpenAI } from "../openai-types.ts";
export function MessageDetailWidget() {
async function handleBackToList() {
await getOpenAI()?.sendFollowUpMessage({
prompt: "Show me all messages",
});
/** @jsxImportSource https://esm.sh/react@19 */
import { useCallback, useEffect, useState, useSyncExternalStore } from "https://esm.sh/react@19"
import type { OpenAiGlobals, SetGlobalsEvent } from "./openai-types.ts";
import { getOpenAI, SET_GLOBALS_EVENT_TYPE } from "./openai-types.ts";
/** String-keyed object with unknown values; used as the default hook payload type. */
type UnknownObject = Record<string, unknown>;
export function useOpenAiGlobal<K extends keyof OpenAiGlobals>(
key: K,
): OpenAiGlobals[K] {
return useSyncExternalStore(
(onChange) => {
};
},
() => window.openai[key],
);
}
/**
 * React hook returning the latest tool output from the host globals.
 *
 * @returns The output cast to T, or null when no tool output is available.
 */
export function useToolOutput<T = UnknownObject>(): T | null {
  const output = useOpenAiGlobal("toolOutput");
  return output as T | null;
}
/**
 * React hook exposing the host's current color scheme.
 *
 * @returns Either "light" or "dark".
 */
export function useTheme(): "light" | "dark" {
  const theme = useOpenAiGlobal("theme");
  return theme;
}
defaultState: T | (() => T),
): readonly [T, (state: React.SetStateAction<T>) => void] {
const widgetStateFromWindow = useOpenAiGlobal("widgetState") as T | null;
const [widgetState, _setWidgetState] = useState<T>(() => {
: stateOrUpdater;
getOpenAI()?.setWidgetState(newState);
return newState;
});
// BACKEND API VAL
import { Hono } from "npm:hono@4.4.12";
import { OpenAI } from "https://esm.town/v/std/openai";
import type { Context } from "npm:hono@4.4.12";
import { streamText } from "npm:hono@4.4.12/streaming";
// --- BACKEND SERVICES ---
const services = {
async callOpenAI(
systemPrompt: string,
userContent: string | object,
c = null,
} = options;
const openai = new OpenAI();
const messages: any[] = [{ role: "system", content: systemPrompt }, {
role: "user",
if (isJson) requestPayload.response_format = { type: "json_object" };
try {
const completion = await openai.chat.completions.create(requestPayload);
if (stream && c) {
return streamText(c, async (s) => {
return completion;
} catch (e) {
console.error(`Error calling OpenAI: ${e.message}`);
throw new Error("AI service failed.");
}
}
const userContent = `Company Context: ${JSON.stringify(company_context)}`;
return services.callOpenAI(config.prompts.INDUSTRY_GENERATOR, userContent, {
c,
isJson: true,
},
};
return services.callOpenAI(config.prompts.DYNAMIC_LIST_GENERATOR, payload, {
c,
isJson: true,
};
// We call OpenAI, expecting JSON
return services.callOpenAI(config.prompts.TASK_DEFINER, userContent, {
c,
isJson: true,
// in case the userPrompt logic diverges more significantly later.
if (category === "prompt_building") {
return services.callOpenAI(systemPrompt, userPrompt, { c, stream: true });
} else {
return services.callOpenAI(systemPrompt, userPrompt, { c, stream: true });
}
});
briefing,
);
const criteriaCompletion = await services.callOpenAI(
"You are a helpful assistant following strict output rules.",
criteriaPrompt,
raw_output: first_draft,
};
return services.callOpenAI(config.prompts.EVALUATOR_AGENT, userContent, {
c,
isJson: true,
## QA Critique
${critique}`;
return services.callOpenAI(config.prompts.REFINER_AGENT, userContent, {
c,
stream: true,
_2 or _3) to create a fresh table.
### OpenAI
```ts
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
messages: [
{ role: "user", content: "Say hello in a creative way" },
// import { searchStrategy, generateEmbeddings } from "./placeholder.ts";
// import { searchStrategy, generateEmbeddings } from "./jigsawstack-orama.ts"; // ~550ms query
// import { searchStrategy, generateEmbeddings } from "./openai-orama.ts"; // ~100-200ms query e
// import { searchStrategy, generateEmbeddings } from "./transformers-cosine.ts"; // ~10-30ms (b
import { searchStrategy, generateEmbeddings } from "./openai-cosine.ts"; // ~100-200ms query emb
// Main search function - comment/uncomment imports above to switch strategies
// "https://console.groq.com/docs/model/moonshotai/kimi-k2-instruct.md",
// "https://console.groq.com/docs/model/moonshotai/kimi-k2-instruct-0905.md",
// "https://console.groq.com/docs/model/openai/gpt-oss-120b.md",
// "https://console.groq.com/docs/model/openai/gpt-oss-20b.md",
// "https://console.groq.com/docs/model/openai/gpt-oss-safeguard-20b.md",
// "https://console.groq.com/docs/model/playai-tts.md",
// "https://console.groq.com/docs/model/playai-tts-arabic.md",
// "https://console.groq.com/docs/model/whisper-large-v3-turbo.md",
// "https://console.groq.com/docs/models.md",
// "https://console.groq.com/docs/openai.md",
// "https://console.groq.com/docs/overview.md",
// "https://console.groq.com/docs/parallel.md",

Vals

88
View more
toowired
openaiproxy
 
Public
marcaureledubois
editImageWithOpenAI
 
Public
toowired
ttsGeneration
Text-to-speech generation using OpenAI TTS API
Public
jubertioai
hello-realtime
Sample app for the OpenAI Realtime API
Public
biowaffeln
openai-demo
 
Public

Docs

12
View more
discordWebhookWeatherHyd - send weather updates to Discord. weather_forecast_in_the_morning - weather forecast on Telegram. weatherBot - OpenAI Weather Bot via function calling. aqi - email alerts when AQI is unhealthy near
You’ll probably want to: Register a new Slash Command. Connect your bot to APIs like OpenAI’s GPT or Dall-E. Come join us in the Val Town Discord if you get