Search

136 results found for embeddings (1357ms)

Code
127

matches({ type: "embedding", provider: "openai" }),
async () => {
const { OpenAIEmbeddings } = await import("https://esm.sh/langchain/embeddings/openai");
return new OpenAIEmbeddings(args);
},
],
async () => {
await import("https://esm.sh/@huggingface/inference");
const { HuggingFaceInferenceEmbeddings } = await import("https://esm.sh/langchain/embeddings/hf");
return new HuggingFaceInferenceEmbeddings(args);
},
],
matches({ type: "embedding", provider: "openai" }),
async () => {
const { OpenAIEmbeddings } = await import(
"npm:langchain/embeddings/openai"
);
return new OpenAIEmbeddings(args);
},
],
async () => {
await import("npm:@huggingface/inference");
const { HuggingFaceInferenceEmbeddings } = await import(
"npm:langchain/embeddings/hf"
);
return new HuggingFaceInferenceEmbeddings(args);
},
],
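The fragments above look like entries in a provider-dispatch table: each pairs a matches(...) predicate with an async factory that lazily imports the matching embeddings client. A minimal sketch of that pattern follows; the matches helper, the Args shape, and the registry wiring are illustrative assumptions, not the val's actual code.

// Hypothetical reconstruction of the matcher/factory dispatch pattern.
type Args = { type: string; provider: string } & Record<string, unknown>;

// Assumed predicate helper: true when every key in the pattern matches the args.
const matches = (pattern: Partial<Args>) => (args: Args) =>
  Object.entries(pattern).every(([k, v]) => (args as Record<string, unknown>)[k] === v);

// Assumed registry of [predicate, factory] pairs, mirroring the snippets above.
const registry: [(args: Args) => boolean, (args: Args) => Promise<unknown>][] = [
  [
    matches({ type: "embedding", provider: "openai" }),
    async (args) => {
      const { OpenAIEmbeddings } = await import("npm:langchain/embeddings/openai");
      return new OpenAIEmbeddings(args);
    },
  ],
  [
    matches({ type: "embedding", provider: "huggingface" }),
    async (args) => {
      await import("npm:@huggingface/inference");
      const { HuggingFaceInferenceEmbeddings } = await import("npm:langchain/embeddings/hf");
      return new HuggingFaceInferenceEmbeddings(args);
    },
  ],
];

// Return a client from the first entry whose predicate accepts the args.
async function createEmbeddingsClient(args: Args) {
  for (const [predicate, factory] of registry) {
    if (predicate(args)) return factory(args);
  }
  throw new Error(`No provider matches ${JSON.stringify(args)}`);
}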
// Get embedding for a single emoji
async function getEmbedding(emoji: string): Promise<number[]> {
const result = await openai.embeddings.create({
input: emoji,
model: "text-embedding-3-small",
});
return result.data[0].embedding;
}
// // Get embedding for a single emoji
// async function getEmbedding(emoji: string): Promise<number[]> {
// const result = await openai.embeddings.create({
// input: emoji,
// model: "text-embedding-3-small",
// }
// // Get embeddings for all emojis
// async function getAllEmbeddings(): Promise<EmojiEmbedding[]> {
// const emojis = getAllEmojis();
// const embeddings: EmojiEmbedding[] = [];
// // Process emojis in batches to avoid rate limits
// const batchResults = await Promise.all(batchPromises);
// embeddings.push(...batchResults);
// // Add a small delay between batches
// }
// return embeddings;
// }
// function findNearestNeighbors(
// targetEmbedding: number[],
// allEmbeddings: EmojiEmbedding[],
// k: number = 5,
// ): { emoji: string; similarity: number }[] {
// return allEmbeddings
// .map(entry => ({
// emoji: entry.emoji,
// async function main() {
// try {
// console.log("Getting embeddings for all emojis...");
// const allEmbeddings = await getAllEmbeddings();
// // Example: Find nearest neighbors for 😆
// console.log(`Finding nearest neighbors for ${targetEmoji}...`);
// const neighbors = findNearestNeighbors(targetEmbedding, allEmbeddings);
// console.log("\nNearest neighbors:");
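The commented-out code above outlines two ideas: embedding emojis in batches with a small delay to avoid rate limits, and a k-nearest-neighbor lookup over the stored vectors. A self-contained sketch of both follows, assuming cosine similarity as the metric; the batch size, delay, and EmojiEmbedding shape are guesses, since the snippet elides them.

interface EmojiEmbedding {
  emoji: string;
  embedding: number[];
}

// Assumed batch parameters; the original values are not visible in the snippet.
const BATCH_SIZE = 50;
const BATCH_DELAY_MS = 500;

async function getAllEmbeddings(
  emojis: string[],
  embed: (emoji: string) => Promise<number[]>,
): Promise<EmojiEmbedding[]> {
  const embeddings: EmojiEmbedding[] = [];
  for (let i = 0; i < emojis.length; i += BATCH_SIZE) {
    // Embed one batch concurrently, then pause before starting the next.
    const batch = emojis.slice(i, i + BATCH_SIZE);
    const batchResults = await Promise.all(
      batch.map(async (emoji) => ({ emoji, embedding: await embed(emoji) })),
    );
    embeddings.push(...batchResults);
    await new Promise((resolve) => setTimeout(resolve, BATCH_DELAY_MS));
  }
  return embeddings;
}

function cosineSimilarity(a: number[], b: number[]): number {
  let dot = 0, normA = 0, normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

function findNearestNeighbors(
  targetEmbedding: number[],
  allEmbeddings: EmojiEmbedding[],
  k: number = 5,
): { emoji: string; similarity: number }[] {
  return allEmbeddings
    .map((entry) => ({
      emoji: entry.emoji,
      similarity: cosineSimilarity(targetEmbedding, entry.embedding),
    }))
    .sort((a, b) => b.similarity - a.similarity)
    .slice(0, k);
}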
# Emoji Instant Search
Uses vector embeddings to get "vibes" search on emojis
// Generate embedding for a given text
async function generateEmbedding(text: string): Promise<number[]> {
const response = await openai.embeddings.create({
model: "text-embedding-ada-002",
input: text,
});
return response.data[0].embedding;
}
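With generateEmbedding completed as above, the "vibes" search flow is presumably: embed the free-text query once, then rank pre-computed emoji vectors by similarity. A hedged usage sketch; emojiIndex and cosineSimilarity are assumed to exist (e.g. as sketched earlier), not shown in the snippet.

declare const emojiIndex: { emoji: string; embedding: number[] }[]; // assumed pre-computed index
declare function cosineSimilarity(a: number[], b: number[]): number;

async function searchEmojis(query: string, k = 10): Promise<string[]> {
  const queryEmbedding = await generateEmbedding(query);
  return emojiIndex
    .map(({ emoji, embedding }) => ({
      emoji,
      similarity: cosineSimilarity(queryEmbedding, embedding),
    }))
    .sort((a, b) => b.similarity - a.similarity)
    .slice(0, k)
    .map((entry) => entry.emoji);
}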
app.post("/", async (c) => {
  const body = await c.req.json();
  const { content, getTags, getTagsPrompt, getSummary, getSummaryPrompt, getEmbeddings } = body;
  try {
    const [summary, tags, embeddings] = await Promise.all([
      getSummary ? getSummaryFn(getSummaryPrompt, content) : null,
      getTags ? getTagsFn(getTagsPrompt, content) : null,
      getEmbeddings ? getEmbeddingsFn(content) : null,
    ]);
    return c.json({ summary: summary?.text, tags: tags?.text, content, embeddings });
  } catch (error) {
    return c.json({ error: error.message || error || "Unknown error." });
  }
});
export const getBlurbFromUrl = async (url: string, {
  content,
  getSummary = true, getSummaryPrompt, getTags = true, getTagsPrompt, getEmbeddings,
  model, provider,
} = {}) => {
  let summary, tags, embeddings;
  if (getSummary) {
    summary = await getSummaryFn(getSummaryPrompt, content, { model, provider });
  }
  if (getTags) {
    tags = await getTagsFn(getTagsPrompt, content, { model, provider });
  }
  if (getEmbeddings) {
    // summary and tags are result objects; embed their text rather than "[object Object]"
    embeddings = await getEmbeddingsFn(content + " " + (summary?.text ?? "") + " " + (tags?.text ?? ""));
  }
  return {
    summary: summary?.text,
    tags: tags?.text,
    embeddings,
  };
};
export const getEmbeddingsFn = async (content) => {
let result = await ai({
provider: "openai",
ejfox/umap/main.tsx
24 matches
* This microservice implements a high-performance dimensionality reduction API using UMAP.
* It uses the umap-js library for efficient UMAP computation and implements caching for improved performance.
* It accepts POST requests with JSON payloads containing high-dimensional embeddings and configuration parameters.
* It returns 2D coordinates as the result of dimensionality reduction.
*
* Common use cases include:
* - Visualizing word embeddings or document vectors in NLP tasks
* - Analyzing gene expression data in bioinformatics
* - Exploring customer segmentation in marketing analytics
* - Visualizing image embeddings in computer vision tasks
*/
try {
const { embeddings, config } = await request.json();
// Input validation
if (!Array.isArray(embeddings) || embeddings.length === 0) {
return new Response("Invalid input: embeddings must be a non-empty array", { status: 400 }
}
if (embeddings.length > MAX_POINTS) {
return new Response(`Input too large: maximum ${MAX_POINTS} points allowed`, { status: 413 });
}
if (embeddings[0].length > MAX_DIMENSIONS) {
return new Response(`Input too high-dimensional: maximum ${MAX_DIMENSIONS} dimensions allowed`, { status: 400 });
}
// Generate cache key
const encoder = new TextEncoder();
const data = encoder.encode(JSON.stringify({ embeddings, config }));
const hashBuffer = await crypto.subtle.digest("SHA-256", data); // Web Crypto does not support MD5; SHA-256 is
const hashArray = Array.from(new Uint8Array(hashBuffer));
const cacheKey = hashArray.map((b) => b.toString(16).padStart(2, "0")).join("");
// Perform UMAP with timeout
const result = await Promise.race([
umap.fit(embeddings),
new Promise((_, reject) => setTimeout(() => reject(new Error("Computation timed out")), TIMEOUT_MS)), // timeout constant; exact name truncated in the snippet
]);
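The umap instance raced against the timeout above is constructed off-screen in this snippet. A rough sketch of what that construction might look like with umap-js; the defaults and the nComponents: 2 choice are assumptions based on the doc comment and the example config shown further down, not the val's actual code.

import { UMAP } from "npm:umap-js";

// Hypothetical helper: build and run UMAP from the request's config.
function fitUmap(
  embeddings: number[][],
  config: { nNeighbors?: number; minDist?: number; spread?: number } = {},
): number[][] {
  const umap = new UMAP({
    nComponents: 2, // the service returns 2D coordinates
    nNeighbors: config.nNeighbors ?? 15,
    minDist: config.minDist ?? 0.1,
    spread: config.spread ?? 1.0,
  });
  return umap.fit(embeddings); // one [x, y] pair per input row
}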
<h2>Common Use Cases</h2>
<ul>
<li>Visualizing word embeddings or document vectors in NLP tasks</li>
<li>Analyzing gene expression data in bioinformatics</li>
<li>Exploring customer segmentation in marketing analytics</li>
<li>Visualizing image embeddings in computer vision tasks</li>
</ul>
<h2>How to Use</h2>
Send a POST request to use the API. The request should include an array of embeddings and optional configuration parameters.
<div class="example">
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
embeddings: [[1,2,3], [4,5,6], [7,8,9]],
config: { nNeighbors: 15, minDist: 0.1, spread: 1.0 }
})
<h3>Curl Example:</h3>
<pre>
curl -X POST -H "Content-Type: application/json" -d '{"embeddings": [[1,2,3], [4,5,6], [7,8,9]], "config": {"nNeighbors": 15, "minDist": 0.1, "spread": 1.0}}' "$SERVICE_URL"
</pre>
</div>
<div class="example">
<h3>Example with OpenAI Embeddings:</h3>
<p>This example shows how to use the UMAP service with OpenAI embeddings:</p>
<pre>
// First, generate embeddings using OpenAI API
import { OpenAI } from "https://esm.town/v/std/openai";
const openai = new OpenAI();
async function getEmbeddings(texts) {
const response = await openai.embeddings.create({
  model: "text-embedding-ada-002",
  input: texts,
});
return response.data.map((d) => d.embedding);
}
// Then, use these embeddings with the UMAP service
const texts = ["Hello world", "OpenAI is amazing", "UMAP reduces dimensions"];
const embeddings = await getEmbeddings(texts);
fetch("/", {
headers: { "Content-Type": "application/json" },
body: JSON.stringify({
embeddings: embeddings,
config: { nNeighbors: 15, minDist: 0.1, spread: 1.0 }
})
function runDemo() {
const testData = {
embeddings: Array.from({length: 100}, () => Array.from({length: 10}, () => Math.random())),
config: { nNeighbors: 15, minDist: 0.1, spread: 1.0 }
};
## Common Use Cases
- Visualizing word embeddings in a scatterplot
- Exploring customer segmentation in marketing analytics
- Visualizing image embeddings in computer vision tasks
tmcw/surprisingEmbeddings - Visualizing embedding distances
maxm/emojiVectorEmbeddings
janpaul123/blogPostEmbeddingsDimensionalityReduction
janpaul123/compareEmbeddings
yawnxyz/embeddingsSearchExample

Users

No users found

Docs

No docs found