Search
Code (4,154 results)
console.log("π€ Sending request to Groq Whisper API"); const start = Date.now(); const response = await fetch("https://api.groq.com/openai/v1/audio/transcriptions", { method: "POST", headers: { console.log("π΅ Sending request to Groq API"); const start = Date.now(); const response = await fetch("https://api.groq.com/openai/v1/chat/completions", { method: "POST", headers: { console.log("π Sending request to Groq Speech API"); const start = Date.now(); const response = await fetch("https://api.groq.com/openai/v1/audio/speech", { method: "POST", headers: { console.log("π Sending request to Groq Speech API"); const start = Date.now(); const response = await fetch("https://api.groq.com/openai/v1/audio/speech", { method: "POST", headers: { console.log("π΅ Sending request to Groq API"); const start = Date.now(); const response = await fetch("https://api.groq.com/openai/v1/chat/completions", { method: "POST", headers: { console.log("π€ Sending request to Groq Whisper API"); const start = Date.now(); const response = await fetch("https://api.groq.com/openai/v1/audio/transcriptions", { method: "POST", headers: {Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.### OpenAI```tsimport { OpenAI } from "https://esm.town/v/std/openai";const openai = new OpenAI();const completion = await openai.chat.completions.create({ messages: [ { role: "user", content: "Say hello in a creative way" },Note: When changing a SQLite table's schema, change the table's name (e.g., add _2 or _3) to create a fresh table.### OpenAI```tsimport { OpenAI } from "https://esm.town/v/std/openai";const openai = new OpenAI();const completion = await openai.chat.completions.create({ messages: [ { role: "user", content: "Say hello in a creative way" },import OpenAI, { type ClientOptions } from "npm:openai";export interface WriterOptions extends ClientOptions {export abstract class AbstractWriter { model: string; openai: OpenAI; constructor(options: WriterOptions) { const { model, ...openaiOptions } = options; this.model = model ? model : "gpt-3.5-turbo"; this.openai = new OpenAI(openaiOptions); } protected abstract createPrompt(...args: any[]): string; protected async performOpenAICall(prompt: string) { try { const response = await this.openai.chat.completions.create({ messages: [{ role: "system", content: prompt }], model: this.model, if (!response.choices || response.choices.length === 0) { throw new Error("No response from OpenAI"); } if (!output) { throw new Error("No output returned by OpenAI. Try again."); }import { type WriterOptions } from "./WriterOptions";import { fetch } from "https://esm.town/v/std/fetch?v=4";import OpenAI, { type ClientOptions } from "npm:openai";export class ReadmeWriter { model: string; openai: OpenAI; apiKey: string; valtownKey: string; constructor(options: WriterOptions) { const { model, ...openaiOptions } = options; this.model = model ? model : "gpt-3.5-turbo"; this.openai = new OpenAI(openaiOptions); this.valtownKey = Deno.env.get("valtown"); } } private async performOpenAICall(prompt: string) { try { const response = await this.openai.chat.completions.create({ messages: [{ role: "system", content: prompt }], model: this.model, if (!response.choices || response.choices.length === 0) { throw new Error("No response from OpenAI"); } if (!readme) { throw new Error("No readme returned by OpenAI. 
Try again."); } const { id, code } = await this.getVal(username, valName); const prompt = this.createPrompt(code, userPrompt); const readme = await this.performOpenAICall(prompt); return { id, readme }; }import { type WriterOptions } from "./WriterOptions";import { sqlite } from "https://esm.town/v/std/sqlite";import OpenAI from "npm:openai";interface QueryWriterOptons extends WriterOptions { model: string; apiKey: string; openai: OpenAI; constructor(options: QueryWriterOptons) { const { table, model, ...openaiOptions } = options; this.table = table; this.model = model; // this.apiKey = openaiOptions.apiKey ? openaiOptions.apiKey : Deno.env.get("OPENAI_API_KEY"); this.openai = new OpenAI(openaiOptions); } try { const response = await this.openai.chat.completions.create({ messages: [{ role: "system", content: prompt }], model: this.model, if (!response.choices || response.choices.length === 0) { throw new Error("No response from OpenAI"); } if (!query) { throw new Error("No SQL returned from OpenAI. Try again."); } try { const response = await this.openai.chat.completions.create({ messages: [{ role: "system", content: prompt }], model: this.model, if (!response.choices || response.choices.length === 0) { throw new Error("No response from OpenAI"); } if (!query) { throw new Error("No SQL returned from OpenAI. Try again."); }import { type ClientOptions } from "npm:openai";export interface WriterOptions extends ClientOptions {reconsumeralization
reconsumeralization

```ts
import { OpenAI } from "https://esm.town/v/std/openai";
import { sqlite } from "https://esm.town/v/stevekrouse/sqlite";

/**
 * Practical Implementation of Collective Content Intelligence
 * Bridging advanced AI with collaborative content creation
 */
exp
// … excerpt truncated in search results
```
kwhinnery_openai
lost1991
```ts
import { OpenAI } from "https://esm.town/v/std/openai";

export default async function(req: Request): Promise<Response> {
  // Handle CORS preflight requests before doing any other work
  if (req.method === "OPTIONS") {
    return new Response(null, {
      headers: {
        "Access-Control-Allow-Origin": "*",
        // … excerpt truncated in search results
```