3,360 results found for openai (1960ms) · Code: 3,265
2. **Environment Variables** (left sidebar)
```bash
OPENAI_API_KEY=your_openai_api_key_here
```
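
At runtime the val reads this key with Deno's environment API, roughly:

```ts
// Returns the key configured in the Environment Variables sidebar (or undefined if unset).
const apiKey = Deno.env.get("OPENAI_API_KEY");
```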
## Customization
Edit the system prompt in `openai.ts` to change how issues are categorized.
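
For illustration, the prompt might be a constant along these lines (hypothetical; the actual `openai.ts` will differ):

```ts
// Hypothetical system prompt; edit the category list to change how issues are bucketed.
const SYSTEM_PROMPT = `You are a triage assistant. Categorize each GitHub issue
as one of: bug, feature-request, question, or docs. Respond with JSON only.`;
```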
## Output
  }))
  const { categorizeIssues: categorizeWithAI } = await import('./openai.ts')
  return categorizeWithAI(processedIssues)
}
import OpenAI from 'npm:openai'

const openai = new OpenAI({
  apiKey: Deno.env.get('OPENAI_API_KEY')
})

export async function categorizeIssues(issues: any[]) {
  const completion = await openai.chat.completions.create({
    model: "gpt-4o-mini",
    messages: [{ role: "user", content: `Analyze these GitHub issues and categorize them into a

  const aiResponse = completion.choices[0].message.content
  if (!aiResponse) throw new Error('OpenAI returned empty response')

  try {
    return JSON.parse(aiResponse)
  } catch (error) {
    console.error('Failed to parse OpenAI response:', aiResponse.substring(0, 1000) + '...')
    // Try to repair truncated JSON by finding the last complete array
    try {
      return JSON.parse(repairedResponse)
    } catch (repairError) {
      throw new Error(`Invalid JSON response from OpenAI: ${error.message}`)
    }
  }
}
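
The code that builds `repairedResponse` is elided in this preview; one plausible repair for a truncated JSON array (an assumption, not necessarily the val's actual logic) is to cut back to the last complete object and re-close the array:

```ts
// Heuristic repair for a truncated JSON array string: keep everything up to the
// last complete object, then close the array so JSON.parse can succeed.
function repairTruncatedJsonArray(text: string): string {
  const lastBrace = text.lastIndexOf("}");
  if (lastBrace === -1) throw new Error("no complete object to recover");
  return text.slice(0, lastBrace + 1) + "]";
}
```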
import { OpenAI } from "npm:openai";
import type {
  AppMentionEvent,
import { slack } from "./slack.ts";
import { readFile } from "https://esm.town/v/std/utils/index.ts";
import { zodTextFormat } from "npm:openai/helpers/zod";
import { z } from "npm:zod@3.23.8";
];
const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY") });

async function getHistory(
  event: AppMentionEvent | GenericMessageEvent,
): Promise<OpenAI.Responses.ResponseInput> {
  const { messages } = await slack.conversations.replies({
    channel: event.channel,

async function shouldReply(
  threadHistory: OpenAI.Responses.ResponseInput,
): Promise<boolean> {
  const ShouldReplyObject = z.object({
  });
  const response = await openai.responses.parse({
    model: "gpt-4o-2024-08-06",
    input: [

export async function reply(
  event: AppMentionEvent | GenericMessageEvent,
  messages: OpenAI.Responses.ResponseInput,
) {
  // reply in thread or start one
  });
  const input: OpenAI.Responses.ResponseInput = [
    { role: "developer", content: SYSTEM_PROMPT },
    ...messages,

  for (let step = 0; step < MAX_STEPS; step++) {
    const response = await openai.responses.create({
      model: "gpt-4.1",
      input,
import { OpenAI } from "https://esm.town/v/std/openai";
import { PDFExtract } from "npm:pdf.js-extract";

const action = url.searchParams.get("action");
const sourceUrl = `https://${url.hostname}${url.pathname}`;
const openai = new OpenAI();
const MAX_TEXT_ANALYZE = 30000;
const jsonResponse = (body: object, status: number) =>

async function callAI(prompt: string, userMessage: string, isJson: boolean = true) {
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [{ role: "system", content: prompt }, { role: "user", content: userMessage }],

    conversation.unshift(contextMessage);
  }
  const completion = await openai.chat.completions.create({
    model: "gpt-4o",
    messages: [systemMessage, ...conversation],
import { OpenAI } from "https://esm.town/v/std/openai";

// --- TYPE DEFINITIONS ---
}

export default async function(req: Request): Promise<Response> {
  const openai = new OpenAI();
  const url = new URL(req.url);
  const CORS_HEADERS = {

    case "synthesizeProject": {
      const synthesisContent = `Current Date: ${new Date().toISOString().split("T")[0]}\n\nG
      const completion = await openai.chat.completions.create({
        model,
        messages: [{ role: "system", content: PROJECT_SYNTHESIS_PROMPT }, {

        JSON.stringify(body.tasks, null, 2)
      }`;
      const completion = await openai.chat.completions.create({
        model,
        messages: [{ role: "system", content: DAILY_REBALANCE_PROMPT }, {

      conversation.unshift(contextMessage);
    }
    const completion = await openai.chat.completions.create({
      model,
      messages: [{ role: "system", content: CHAT_PROMPT }, ...conversation],
import { openai } from "npm:@ai-sdk/openai";
import { generateText, tool } from "npm:ai";
import { z } from "npm:zod@3.25.75";

const { text } = await generateText({
  model: openai("gpt-4.1"),
  tools: {
    checkWeather: tool({
Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table rather than altering the existing one.
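
A minimal sketch of that convention with Val Town's std/sqlite client (the `todos` table name here is just an example):

```ts
import { sqlite } from "https://esm.town/v/std/sqlite";

// Schema changed? Create todos_2 with the new columns and write to it going forward,
// instead of ALTERing the original todos table.
await sqlite.execute(`CREATE TABLE IF NOT EXISTS todos_2 (
  id INTEGER PRIMARY KEY AUTOINCREMENT,
  title TEXT NOT NULL,
  due_date TEXT
)`);
```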
### OpenAI
```ts
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
const completion = await openai.chat.completions.create({
  messages: [
    { role: "user", content: "Say hello in a creative way" },
  ],
  model: "gpt-4o-mini", // assumed; the preview cuts off before the closing lines
});
```
*/
import { OpenAI } from "https://esm.town/v/std/openai";
import { Octokit } from "https://esm.sh/@octokit/rest@20.0.2";
import { WebClient } from "https://esm.sh/@slack/web-api@7.0.2";

async function isBugReportLLM(text: string): Promise<boolean> {
  try {
    // Check if OpenAI API key is available
    if (!Deno.env.get("OPENAI_API_KEY")) {
      console.warn("OpenAI API key not found - bug detection disabled");
      return false;
    }
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      messages: [
        {

async function findRelatedIssues(slackMessage: string, issues: any[]): Promise<any[]> {
  try {
    // Check if OpenAI API key is available
    if (!Deno.env.get("OPENAI_API_KEY")) {
      return [];
    }

    }).join("\n\n");
    const openai = new OpenAI();
    const completion = await openai.chat.completions.create({
      messages: [
        {
1. When a new message is posted in a configured Slack channel (i.e. #bugs or #support), Slack sends an event to the val
2. The val makes an OpenAI call to determine if the message is a bug report
3. It then searches GitHub for semantically related open issues with a separate OpenAI call
4. It posts a comment in the Slack thread with links to related GitHub issues, with a "Relevance" score for each
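
A rough sketch of how steps 1-4 fit together in the val's HTTP handler, assuming the `isBugReportLLM` and `findRelatedIssues` helpers shown above; `fetchOpenIssues` and `postThreadReply` are hypothetical stand-ins for the GitHub and Slack calls not shown in the preview:

```ts
// Hypothetical glue for the flow above; only isBugReportLLM and findRelatedIssues
// come from the snippet, the other helpers are placeholders.
export default async function (req: Request): Promise<Response> {
  const body = await req.json();

  // Slack URL verification handshake
  if (body.type === "url_verification") {
    return Response.json({ challenge: body.challenge });
  }

  const event = body.event;
  if (event?.type === "message" && typeof event.text === "string") {
    if (await isBugReportLLM(event.text)) {                         // step 2: classify the message
      const issues = await fetchOpenIssues();                       // placeholder: list open GitHub issues
      const related = await findRelatedIssues(event.text, issues);  // step 3: semantic match
      await postThreadReply(event.channel, event.ts, related);      // step 4: reply in the thread
    }
  }
  return new Response("ok");
}
```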