Search

25 results found for "openai" (75ms)

Code (25)

app.post("/", async (c) => {
// Early API key check to avoid confusing when using the tester ui
if (!Deno.env.get("OPENAI_API_KEY")) {
return c.json(
{ error: "Add OPENAI_API_KEY in Environment Variables" },
503,
);
// Test endpoint for the welcome UI
app.post("/test", async (c) => {
if (!Deno.env.get("OPENAI_API_KEY")) {
return c.json(
{ error: "Add OPENAI_API_KEY in Environment Variables" },
503,
);
// Reanalyze a lead
app.post("/lead/:id/reanalyze", async (c) => {
if (!Deno.env.get("OPENAI_API_KEY")) {
return c.json(
{ error: "Add OPENAI_API_KEY in Environment Variables" },
503,
);
import { readFile } from "https://esm.town/v/std/utils/index.ts";
import { Agent, run, RunResult, webSearchTool } from "npm:@openai/agents@0.3.0";
import { getLeadById, storeLead, updateLeadOutput } from "./db.ts";
import { emailsFromGitHubUsername } from "https://esm.town/v/dcm31/github-user-email/index.ts";
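The same `OPENAI_API_KEY` guard is repeated at the top of each handler above. A minimal sketch of factoring it into a shared Hono middleware; the `requireOpenAIKey` name and the route wiring are assumptions for illustration, not code from this val:

```ts
import { Hono } from "npm:hono";
import type { Context, Next } from "npm:hono";

const app = new Hono();

// Hypothetical shared guard: reject with 503 until OPENAI_API_KEY is configured.
async function requireOpenAIKey(c: Context, next: Next) {
  if (!Deno.env.get("OPENAI_API_KEY")) {
    return c.json(
      { error: "Add OPENAI_API_KEY in Environment Variables" },
      503,
    );
  }
  await next();
}

// Attach the guard to each route that talks to OpenAI.
app.use("/", requireOpenAIKey);
app.use("/test", requireOpenAIKey);
app.use("/lead/:id/reanalyze", requireOpenAIKey);
```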
1. Click [**Remix**](/?intent=remix)
2. Add environment variables:
   - `OPENAI_API_KEY` — for AI lead qualification
   - `GITHUB_TOKEN` — for accessing the GitHub API
     ([create one here](https://github.com/settings/tokens))
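Once both variables are set, the code reads them with `Deno.env.get`. A small sketch of what that looks like, including one way the GitHub token could be attached to a request; the endpoint and startup check here are illustrative assumptions, not taken from this val:

```ts
const githubToken = Deno.env.get("GITHUB_TOKEN");
const openaiKey = Deno.env.get("OPENAI_API_KEY");

// Fail fast if either variable is missing (hypothetical startup check).
if (!githubToken || !openaiKey) {
  throw new Error("Set OPENAI_API_KEY and GITHUB_TOKEN in Environment Variables");
}

// Illustrative GitHub API call using the token.
const res = await fetch("https://api.github.com/users/octocat", {
  headers: {
    Authorization: `Bearer ${githubToken}`,
    Accept: "application/vnd.github+json",
  },
});
const profile = await res.json();
console.log(profile.login);
```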
app.post("/", async (c) => {
// Early API key check to avoid confusing when using the tester ui
if (!Deno.env.get("OPENAI_API_KEY")) {
return c.json(
{ error: "Add OPENAI_API_KEY in Environment Variables" },
503,
);
// Test endpoint for the welcome UI
app.post("/test", async (c) => {
if (!Deno.env.get("OPENAI_API_KEY")) {
return c.json(
{ error: "Add OPENAI_API_KEY in Environment Variables" },
503,
);
// Reanalyze a lead
app.post("/lead/:id/reanalyze", async (c) => {
if (!Deno.env.get("OPENAI_API_KEY")) {
return c.json(
{ error: "Add OPENAI_API_KEY in Environment Variables" },
503,
);
import { readFile } from "https://esm.town/v/std/utils/index.ts";
import { Agent, run, RunResult, webSearchTool } from "npm:@openai/agents@0.3.0";
import { getLeadById, storeLead, updateLeadOutput } from "./db.ts";
</dd>
{/* OpenAI */}
<dt>
  <strong>OpenAI</strong>
</dt>
<dd>
  <details>
    <summary>
      {healthData.connections.openai.status === "connected" ? (
        <code className="pico-background-green-100">
          Connected
        </code>
      ) : healthData.connections.openai.status === "error" ? (
        <code
          className="pico-background-red-500"
<article>
  <dl>
    {healthData.connections.openai.status === "error" && (
      <>
        <dt>Error</dt>
        <dd>{healthData.connections.openai.error}</dd>
      </>
    )}
    <dt>Environment variable</dt>
    <dd>
      <code>{healthData.connections.openai.envVar}</code>
    </dd>
  </dl>
  {healthData.connections.openai.status !== "connected" && (
    <p>
      To connect to OpenAI, add to your{" "}
      {envUrl ? (
        <a href={envUrl} target="_blank">
      <br />
      <code>
        {healthData.connections.openai.envVar}=your-api-key
      </code>
    </p>
    <pre>
      {JSON.stringify(
        healthData.connections.openai,
        null,
        2
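From these fragments, each entry under `healthData.connections` appears to carry a status, an optional error message, and the name of its environment variable. A hedged sketch of that shape; the `ConnectionHealth` name and any status value beyond `"connected"` and `"error"` are assumptions:

```ts
// Hypothetical shape inferred from the JSX above.
interface ConnectionHealth {
  status: "connected" | "error" | "not_configured"; // "not_configured" is a guess
  error?: string; // present when status === "error"
  envVar: string; // e.g. "OPENAI_API_KEY"
}

interface HealthData {
  connections: {
    openai: ConnectionHealth;
    // other services would presumably follow the same shape
  };
}
```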
/**
 * AI service for fuzzy matching using OpenAI
 *
 * Uses OPENAI_API_KEY env var if set (recommended for production).
 * Falls back to Val Town's shared @std/openai (has 10 req/min limit).
 */
import { OpenAI as ValTownOpenAI } from "https://esm.town/v/std/openai";
import OpenAINpm from "npm:openai@4";
import type { SummaryResult } from '../../shared/types.ts';

// Use own API key if available, otherwise fall back to Val Town's shared client
function getOpenAIClient() {
  const apiKey = Deno.env.get("OPENAI_API_KEY");
  if (apiKey) {
    return new OpenAINpm({ apiKey });
  }
  // Fall back to Val Town's shared OpenAI (has rate limits)
  return new ValTownOpenAI();
}
/**
 * Validate OpenAI API key by making a simple API call
 * Uses models.list() as a lightweight validation endpoint
 *
 * @returns {success, error?}
 */
export async function validateOpenAIConnection(): Promise<{
  success: boolean;
  error?: string;
}> {
  const apiKey = Deno.env.get('OPENAI_API_KEY');
  if (!apiKey) {
    return { success: false, error: 'OPENAI_API_KEY not set' };
  }
  try {
    const openai = getOpenAIClient();
    const response = await openai.models.list();
    // Access .data to force the API call (lazy evaluation)
    const models = response.data;
    if (!models) {
      return { success: false, error: 'No response from OpenAI' };
    }
    return { success: true };
  } catch (error) {
    // Assumed error handling (the snippet is truncated after the try block):
    // surface the failure message so the caller can report it.
    return {
      success: false,
      error: error instanceof Error ? error.message : String(error),
    };
  }
}
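A hedged sketch of how a health endpoint might consume this validator to produce the `healthData.connections.openai` object rendered above; the route, module path, and response shape are assumptions, not code from this project:

```ts
import { Hono } from "npm:hono";
import { validateOpenAIConnection } from "./ai.ts"; // hypothetical module path

const app = new Hono();

// Hypothetical /health handler feeding the dashboard's healthData.connections.
app.get("/health", async (c) => {
  const openaiCheck = await validateOpenAIConnection();
  return c.json({
    connections: {
      openai: {
        status: openaiCheck.success ? "connected" : "error",
        error: openaiCheck.error,
        envVar: "OPENAI_API_KEY",
      },
    },
  });
});

// Val Town-style HTTP entry point (assumption about how this val is served).
export default app.fetch;
```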
  const openai = getOpenAIClient();
  const candidateList = candidates
  try {
    const completion = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      max_tokens: 100,
 */
export async function generateSummary(context: SummaryContext): Promise<SummaryResult> {
  const openai = getOpenAIClient();
  // Build context sections for the prompt
  try {
    const completion = await openai.chat.completions.create({
      model: "gpt-4o-mini",
      max_tokens: 200,
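Both truncated calls above follow the same pattern. A minimal sketch of a complete `chat.completions.create` call with the npm `openai` v4 client; the prompt text and message layout are illustrative assumptions, since the real prompts are built from the matching and summary context:

```ts
import OpenAI from "npm:openai@4";

const openai = new OpenAI({ apiKey: Deno.env.get("OPENAI_API_KEY") });

// Illustrative call shape only; model and max_tokens mirror the snippets above.
const completion = await openai.chat.completions.create({
  model: "gpt-4o-mini",
  max_tokens: 200,
  messages: [
    { role: "system", content: "You summarize records in two sentences." },
    { role: "user", content: "Summarize: example input text" },
  ],
});

console.log(completion.choices[0]?.message?.content ?? "");
```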
"Bash(find:*)",
"Bash(deno check:*)",
"WebFetch(domain:platform.openai.com)",
"WebSearch"
],
name: {
displayName: "Name",
purpose: "What the todo says. May be cleaned up by AI if OpenAI is configured.",
category: "core",
required: true,

Vals

No vals found

Users

No users found

Docs

No docs found