Search

3,343 results found for "openai" (1,915 ms)

Code: 3,248

Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.
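A minimal sketch of that pattern, assuming Val Town's `std/sqlite` module; the table name and columns below are placeholders, not from any of the matched vals:

```ts
import { sqlite } from "https://esm.town/v/std/sqlite";

// Schema changed? Create a renamed table (messages_2) instead of altering messages in place.
await sqlite.execute(`
  CREATE TABLE IF NOT EXISTS messages_2 (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    content TEXT NOT NULL,
    created_at TEXT DEFAULT CURRENT_TIMESTAMP
  )
`);
```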
### OpenAI

```ts
import { OpenAI } from "https://esm.town/v/std/openai";

const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini",
});
console.log(completion.choices[0].message.content);
```
  }))
  const { categorizeIssues: categorizeWithAI } = await import('./openai.ts')
  return categorizeWithAI(processedIssues)
}
2. **Environment Variables** (left sidebar)
```bash
OPENAI_API_KEY=your_openai_api_key_here
```
## Customization
Edit the system prompt in `openai.ts` to change how issues are categorized.
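For illustration, a prompt constant in `openai.ts` might look like the sketch below; the constant name and wording are hypothetical, not the project's actual prompt:

```ts
// Hypothetical example: keep the categorization instructions in one editable constant.
export const CATEGORIZATION_PROMPT = `
Analyze the GitHub issues below and categorize each one as
"bug", "feature-request", "question", or "documentation".
Respond with a JSON array of { "number": <issue number>, "category": <category> }
objects and nothing else.
`;
```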
## Output
import OpenAI from 'npm:openai'

const openai = new OpenAI({
  apiKey: Deno.env.get('OPENAI_API_KEY')
})

export async function categorizeIssues(issues: any[]) {
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [{ role: "user", content: `Analyze these GitHub issues and categorize them into a

  const aiResponse = completion.choices[0].message.content
  if (!aiResponse) throw new Error('OpenAI returned empty response')

  try {
    return JSON.parse(aiResponse)
  } catch (error) {
    console.error('Failed to parse OpenAI response:', aiResponse.substring(0, 1000) + '...')
    // Try to repair truncated JSON by finding the last complete array
    return JSON.parse(repairedResponse)
  } catch (repairError) {
    throw new Error(`Invalid JSON response from OpenAI: ${error.message}`)
  }
}
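The `repairedResponse` used in the second parse attempt appears to be defined on lines the search view omits. A minimal sketch of the repair strategy the comment describes; the helper name and exact heuristic (cut back to the last complete object and re-close the array) are assumptions, not the project's actual code:

```ts
// Hypothetical helper: salvage a truncated JSON array by keeping everything up
// to the last complete object and re-closing the array.
function repairTruncatedJson(raw: string): string {
  const lastObjectEnd = raw.lastIndexOf("}");
  if (lastObjectEnd === -1) {
    throw new Error("No complete JSON object found in response");
  }
  return raw.slice(0, lastObjectEnd + 1) + "]";
}

// e.g. const repairedResponse = repairTruncatedJson(aiResponse);
```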
join/tasks/main.tsx (14 matches)
import { Hono } from "npm:hono@4.4.12";
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- Banks 7/20/2025. For Leo & Niko. <3
// --- josh@dereticular.com

const userInput = `Occupation: ${occupation.title}, Task: ${task.task}`;
try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: PROMPT_REFINER_SYSTEM_PROMPT }, { role: "user", content: userInput }],

const userInput = `Occupation: ${occupation_title}, Task: ${task}`;
try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: PROMPT_REFINER_SYSTEM_PROMPT }, { role: "user", content: userInput }],

if (!refined_prompt) return c.json({ error: "refined_prompt is required" }, 400);
try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: INPUT_EXTRACTOR_SYSTEM_PROMPT }, { role: "user", con

const userInput = JSON.stringify({ type: "occupations", industry: industry });
try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [

const userInput = JSON.stringify({ type: "tasks", occupation: occupation_name });
try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [

try {
  const openai = new OpenAI();
  const agentCompletion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [

    throw new Error("The agent returned no content.");
  }
  const htmlCompletion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
const headerKey = req.headers['x-goog-api-key'];
// Check for Bearer token in Authorization header (OpenAI style)
if (authHeader && authHeader.startsWith('Bearer ')) {
  const token = authHeader.substring(7);

try {
  for await (const chunk of stream) {
    const openAIChunk = toOpenAIStreamChunk(chunk, model);
    const chunkText = openAIChunk.choices[0].delta.content || "";
    if (chunkText) {
      process.stdout.write(chunkText);
    }
    res.write(`data: ${JSON.stringify(openAIChunk)}\n\n`);
  }
  // Send the final [DONE] message according to OpenAI spec
  res.write('data: [DONE]\n\n');
} catch (error) {

const geminiResponse = await service.generateContent(model, requestBody);
console.log('[Server] Raw Gemini Unary Response:', JSON.stringify(geminiResponse, null, 2));
const openAIResponse = toOpenAIChatCompletion(geminiResponse, model);
console.log('[Server Response Unary]');
process.stdout.write('> ');
process.stdout.write('\n');
res.writeHead(200, { 'Content-Type': 'application/json' });
res.end(JSON.stringify(openAIResponse));
const expiryDate = service.authClient.credentials.expiry_date;
console.log(`[Auth Token] Time until expiry: ${formatExpiryTime(expiryDate)}`);

if (req.method === 'GET' && requestUrl.pathname === '/v1/models') {
  const models = await service.listModels();
  const openAIModels = toOpenAIModelList(models.models.map(m => m.name.replace('models/', '')));
  res.writeHead(200, { 'Content-Type': 'application/json' });
  const expiryDate = service.authClient.credentials.expiry_date;
  console.log(`[Auth Token] Time until expiry: ${formatExpiryTime(expiryDate)}`);
  return res.end(JSON.stringify(openAIModels));
}

if (req.method === 'POST' && requestUrl.pathname === '/v1/chat/completions') {
  const openaiRequest = await getRequestBody(req);
  const model = openaiRequest.model;
  const geminiRequest = toGeminiRequest(openaiRequest);
  if (PROMPT_LOG_MODE !== 'none') {
  }
  if (openaiRequest.stream) {
    await handleStreamRequest(res, service, model, geminiRequest);
  } else {

server.listen(SERVER_PORT, HOST, () => {
  console.log(`--- OpenAI-Compatible Server Configuration ---`);
  console.log(` Host: ${HOST}`);
  console.log(` Port: ${SERVER_PORT}`);
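Because this server exposes OpenAI-compatible `/v1/models` and `/v1/chat/completions` routes, a standard OpenAI SDK client can be pointed at it by overriding the base URL. A minimal sketch; the port, API key, and model id below are placeholders, not values taken from this code:

```ts
import OpenAI from "npm:openai";

// Point the standard OpenAI client at the local Gemini-backed proxy.
const client = new OpenAI({
  baseURL: "http://localhost:8080/v1", // placeholder; match the proxy's HOST and SERVER_PORT
  apiKey: "dummy-key",                 // placeholder bearer token expected by the proxy's auth check
});

const completion = await client.chat.completions.create({
  model: "gemini-1.5-flash",           // assumed id; use one returned by GET /v1/models
  messages: [{ role: "user", content: "Hello from the proxy" }],
});

console.log(completion.choices[0].message.content);
```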
import { Hono } from "npm:hono@4.4.12";
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- HIERARCHICAL DATABASE (Emptied as requested) ---

const userInput = `Occupation: ${occupation.title}, Task: ${task.task}`;
try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: PROMPT_REFINER_SYSTEM_PROMPT }, { role: "user", content: userInput }],

if (!refined_prompt) return c.json({ error: "refined_prompt is required" }, 400);
try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: INPUT_EXTRACTOR_SYSTEM_PROMPT }, { role: "user", con
}

try {
  const openai = new OpenAI();
  const agentCompletion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [

    throw new Error("The agent returned no content.");
  }
  const markdownCompletion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [
import { Hono } from "npm:hono@4.4.12";
// @ts-ignore
import { OpenAI } from "https://esm.town/v/std/openai?v=4";

// --- HIERARCHICAL DATABASE (Emptied as requested) ---

try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: PROMPT_REFINER_SYSTEM_PROMPT }, { role: "user", cont

try {
  const openai = new OpenAI();
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: INPUT_EXTRACTOR_SYSTEM_PROMPT }, { role: "user", con

try {
  const openai = new OpenAI();
  const agentCompletion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [

  }
  const latexCompletion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [