Note: When changing a SQLite table's schema, change the table's name (e.g., add `_2` or `_3`) to create a fresh table; a minimal sketch of this pattern follows the OpenAI example below.

### OpenAI

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
  max_tokens: 30,
});
console.log(completion.choices[0].message.content);
```
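As a minimal sketch of the table-renaming pattern from the note above, assuming the `sqlite.execute` client that appears later in these results; the `messages_2` table and its columns are hypothetical:

```ts
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

// Hypothetical example: the original `messages` table needs a new column,
// so a fresh `messages_2` table is created instead of altering the old one.
await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS messages_2 (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    author TEXT NOT NULL,
    body TEXT NOT NULL,
    created_at TEXT DEFAULT CURRENT_TIMESTAMP
  )
`);

// All reads and writes then target the new table name.
await sqlite.execute({
  sql: "INSERT INTO messages_2 (author, body) VALUES (?, ?)",
  args: ["alice", "hello"],
});
```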
```ts
export const COMPETITORS = [
  { name: "OpenAI", url: "https://openai.com", description: "OpenAI homepage" },
  // ...remaining entries truncated in the search preview
];
```
- [ ] Get OpenTownie, Gemini, Claude, or OpenAI to synthesize the core of these patterns into a prompt we can use to make more React Router apps, such as...
- [ ] Convert this into the basic React Router guest book (and preserve this forum app in another project?)
- [ ] To what extent can these patterns be packaged up into a Val Town Router project? It would be neat to get the version pinning centralized; can this as-a-library be that centralized place?
```ts
import { email } from "https://esm.town/v/std/email";
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function (incomingEmail: Email) {
  try {
    const openai = new OpenAI();
    const emailContent = incomingEmail.text || incomingEmail.html || "No content available";
    const completion = await openai.chat.completions.create({
      messages: [
        {
          // ...remainder of the handler truncated in the search preview
```
```ts
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- AI BEHAVIORAL GUIDELINES ---

// ...(excerpt) footer markup rendered by the page:
//   <footer class="text-center mt-6 text-gray-500 text-sm">
//     <p>Pestodon © 2025. Powered by <a href="${sourceUrl}" class="text-blue-400 hover:underline" target="_blank">Val Town</a> & OpenAI.</p>
//   </footer>

// ...(excerpt) handling the "enhanceNotes" action:
if (req.method === "POST" && action === "enhanceNotes") {
  try {
    const openai = new OpenAI();
    const body = await req.json();
    // ...
    const completion = await openai.chat.completions.create({
      model: "gpt-4o",
      messages: [
        // ...remainder truncated in the search preview
```
2. **Environment Variables** (left sidebar)

   ```bash
   OPENAI_API_KEY=your_openai_api_key_here
   ```

## Customization

Edit the system prompt in `openai.ts` to change how issues are categorized.

## Output
```ts
  }))

  // Lazily load the OpenAI categorizer and hand it the pre-processed issues.
  const { categorizeIssues: categorizeWithAI } = await import('./openai.ts')
  return categorizeWithAI(processedIssues)
}
```
```ts
import OpenAI from 'npm:openai'

const openai = new OpenAI({ apiKey: Deno.env.get('OPENAI_API_KEY') })

export async function categorizeIssues(issues: any[]) {
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [{
      role: "user",
      // Prompt text truncated in the search preview.
      content: `Analyze these GitHub issues and categorize them into a two-level system: ...`,
    }],
  })

  const aiResponse = completion.choices[0].message.content
  if (!aiResponse) throw new Error('OpenAI returned empty response')
  try {
    return JSON.parse(aiResponse)
  } catch (error) {
    console.error('Failed to parse OpenAI response:', aiResponse.substring(0, 1000) + '...')
    // Try to repair truncated JSON by finding the last complete array
    try {
      // (repair logic elided in the search preview; `repairedResponse` is produced here)
      return JSON.parse(repairedResponse)
    } catch (repairError) {
      throw new Error(`Invalid JSON response from OpenAI: ${error.message}`)
    }
  }
}
```
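The repair step referenced above is elided in the preview. Below is a minimal sketch of one way to implement it, assuming the model was asked to return a JSON array of objects; the `repairTruncatedJsonArray` helper name is hypothetical.

```ts
// Hypothetical helper: recover a parseable array from a truncated JSON response
// by cutting the string back to the last complete element and re-closing the array.
function repairTruncatedJsonArray(raw: string): string {
  const start = raw.indexOf('[')
  if (start === -1) throw new Error('No JSON array found in response')
  // Find the last object that is followed by a comma, i.e. the last complete element.
  const lastComplete = raw.lastIndexOf('},')
  if (lastComplete === -1) throw new Error('Not enough complete elements to repair')
  return raw.slice(start, lastComplete + 1) + ']'
}

// Usage inside the catch block above (sketch):
// const repairedResponse = repairTruncatedJsonArray(aiResponse)
// return JSON.parse(repairedResponse)
```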
import { OpenAI } from "npm:openai";import type { AppMentionEvent,import { slack } from "./slack.ts";import { readFile } from "https://esm.town/v/std/utils/index.ts";import { zodTextFormat } from "npm:openai/helpers/zod";import { z } from "npm:zod@3.23.8";];const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY") });async function getHistory( event: AppMentionEvent | GenericMessageEvent,): Promise<OpenAI.Responses.ResponseInput> { const { messages } = await slack.conversations.replies({ channel: event.channel,async function shouldReply( threadHistory: OpenAI.Responses.ResponseInput,): Promise<boolean> { const ShouldReplyOjbect = z.object({ }); const response = await openai.responses.parse({ model: "gpt-4o-2024-08-06", input: [export async function reply( event: AppMentionEvent | GenericMessageEvent, messages: OpenAI.Responses.ResponseInput,) { // reply in thread or start one }); const input: OpenAI.Responses.ResponseInput = [ { role: "developer", content: SYSTEM_PROMPT }, ...messages, for (let step = 0; step < MAX_STEPS; step++) { const response = await openai.responses.create({ model: "gpt-4.1", input,
```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

/**
 * Practical Implementation of Collective Content Intelligence
 * Bridging advanced AI with collaborative content creation
 */
exp // ...declaration truncated in the search preview
```
```ts
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function (req: Request): Promise<Response> {
  // Handle CORS preflight requests.
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        // ...remaining CORS headers and handler body truncated in the search preview
```