Search results for "substrate": 50 results found (1749ms), 47 code matches.

Generate a story and summarize it using [Substrate](https://substrate.run/).
🪩 To fork, [sign up for Substrate](https://substrate.run/signin) to get your own API key and $5
import { ComputeText, sb, Substrate } from "npm:substrate";
const substrate = new Substrate({ apiKey: Deno.env.get("SUBSTRATE_API_KEY") });
const story = new ComputeText({ prompt: "tell me a story, be concise", max_tokens: 800 });
// The summary node is cut off in this snippet; its prompt is an assumed reconstruction.
const summary = new ComputeText({
  prompt: sb.interpolate`Summarize this story in one sentence: ${story.future.text}`,
});
const stream = await substrate.stream(summary);
export default async function render(req: Request): Promise<Response> {
  // The condition around the robots.txt response is not shown in the snippet; assumed here.
  if (new URL(req.url).pathname === "/robots.txt") {
    return new Response("User-agent: *\nDisallow: /");
  }
  const renderNodeResults = (await import("https://esm.town/v/substrate/renderNodeResults")).default;
  return renderNodeResults(stream);
}
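The val streams node results to the browser. As a usage note, the same graph can also be run to completion; a minimal sketch, assuming the `story`, `summary`, and `substrate` definitions above:

```ts
// Sketch: run the graph without streaming and read both outputs.
const res = await substrate.run(story, summary);
console.log(res.get(story).text);   // the full story
console.log(res.get(summary).text); // the one-sentence summary
```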
Currently it renders cached output, for demo purposes (running takes 30-60s).
🪩 To fork, [sign up for Substrate](https://substrate.run/signin) to get your own API key and $5
pa/gifStory/main.tsx (11 matches)
import { extractValInfo } from "https://esm.town/v/pomdtr/extractValInfo";
import { ComputeJSON, ComputeText, GenerateImage, sb, StableVideoDiffusion, Substrate } from "npm:substrate";
import { z } from "npm:zod";
import { zodToJsonSchema } from "npm:zod-to-json-schema";
const substrate = new Substrate({ apiKey: Deno.env.get("SUBSTRATE_API_KEY") });
const topic = "a traveler lost in a rainforest";
`));
// Don't run the graph, we'll display the cached result
// const res = await substrate.run(...videoNodes);
// const texts = res.get(sentences).json_object.sentences;
// let gifUrls = [];
const texts = sentencesResult.sentences;
let gifUrls = [
"https://cdn.substrate.run/u/TdI00FeAB8nzr0Su/d73078011783f5ac37980e47df9d0ee7985fd2d476ee
"https://cdn.substrate.run/u/TdI00FeAB8nzr0Su/69c79387d16391d3f8c246ccbd694bed55f39d3a60a6
"https://cdn.substrate.run/u/TdI00FeAB8nzr0Su/75db479fdc66d185edbf24f7b037257adb98ec7bfe7b
"https://cdn.substrate.run/u/TdI00FeAB8nzr0Su/bd26d54a083b540210ac23bc9fd296fc13db9a9edff6
"https://cdn.substrate.run/u/TdI00FeAB8nzr0Su/b14849ad8bb9af9c60567d85d37de79c1d634748ed6a
];
// Render page
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>GIF Story | Substrate</title>
<style>
#container {
<div>
<div style="text-align:center;background-color:transparent;padding:4px;border-radius:4px;">
<a href="https://www.substrate.run" target="_blank" style="font-size:0.8rem;color:#000;tex
<a href="https://www.val.town/v/substrate/gifStory" style="font-size:0.8rem;color:#000;tex
</div>
<script>
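The matches above show only the cached GIF URLs and the page shell; the graph that produces the GIFs is not captured. A rough sketch of how per-sentence video nodes could be wired up with the node types this val imports (the StableVideoDiffusion settings below are assumptions, not the val's actual values):

```ts
// Hypothetical sketch: one GenerateImage + StableVideoDiffusion pair per sentence.
import { GenerateImage, StableVideoDiffusion, Substrate } from "npm:substrate";

const substrate = new Substrate({ apiKey: Deno.env.get("SUBSTRATE_API_KEY") });
const sentences = ["A traveler pushes through dense rainforest."]; // stand-in for the generated sentences

const videoNodes = sentences.map((sentence) =>
  new StableVideoDiffusion({
    // Feed each generated frame into the video model via its future image_uri.
    image_uri: new GenerateImage({ prompt: sentence }).future.image_uri,
    store: "hosted",      // assumed
    output_format: "gif", // assumed
    fps: 10,              // assumed
    motion_bucket_id: 20, // assumed
  })
);

// Matches the commented-out call in the snippet above:
// const res = await substrate.run(...videoNodes);
// const gifUrls = videoNodes.map((node) => res.get(node).video_uri); // output field name assumed
```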
import process from "node:process";
import { renderToString } from "npm:react-dom@18/server";
import { QueryVectorStore, sb, Substrate } from "npm:substrate";
type ShotResult = {
async function getResults(q: string, n: number): Promise<ShotResult[]> {
  const substrate = new Substrate({
    apiKey: process.env.SUBSTRATE_API_KEY,
  });
  const collectionName = "shotclip";
  // Most QueryVectorStore fields are cut off in this snippet; everything below
  // other than top_k and the cache_age option is an assumed reconstruction.
  const query = new QueryVectorStore({
    collection_name: collectionName,
    model: "clip",
    query_strings: [q],
    include_metadata: true,
    top_k: n,
  }, { cache_age: 60 * 60 });
  const res = await substrate.run(query);
  return res.get(query).results[0] as ShotResult[];
}
<p style={{ textAlign: "center", margin: "2px 0" }}>
<a
href="https://www.substrate.run/"
style={{
textDecoration: "none",
}}
>
[made with substrate]
</a>
</p>
[sub.audio](https://sub.audio) – generate subtitles and chapters for any audio URL.
Speech-to-text and chapter summaries powered by [Substrate](https://substrate.run)
🪩 To fork, [sign up for Substrate](https://substrate.run/signin) to get your own API key and $5
import { ComputeJSON, sb, Substrate, TranscribeSpeech } from "npm:substrate";
import { extractValInfo } from "https://esm.town/v/pomdtr/extractValInfo";
async function processAudio(audio_uri) {
const substrate = new Substrate({ apiKey: Deno.env.get("SUBSTRATE_API_KEY") });
const opts = { cache_age: 60 * 60 * 24 * 7 };
opts,
);
const res = await substrate.run(transcribe, chapters, timestamps);
return {
transcript: res.get(transcribe),
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1.0" />
<link rel="icon" href="https://www.substrate.run/favicon.ico" type="image/x-icon">
<link
href="https://fonts.googleapis.com/css2?family=Roboto:wght@700&display=swap"
${
!audio_uri
? `<a id="federer-link" href="?url=https://media.substrate.run/federer-dartmouth.m4a">Or t
: ""
}
></div>
</div>
<a id="made-with" href="https://substrate.run" target="_blank"
>made with <b>substrate</b></a
>
<a id="view-source" href="${extractValInfo(import.meta.url).htmlUrl}" target="_blank"
#### SHOTCLIP
Demo of embedding images with [substrate](https://www.substrate.run/), and querying them for semantic similarity to a text prompt.
Use the query parameter `prompt` to control the search.
![preview](https://media.substrate.run/shotclip-preview.png)
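A minimal sketch of the HTTP handler that description implies, reusing the `getResults` helper and the `renderToString` import from the code match above (the handler shape and markup are assumptions):

```tsx
// Hypothetical handler: read ?prompt=... and render the top results as HTML.
export default async function handler(req: Request): Promise<Response> {
  const prompt = new URL(req.url).searchParams.get("prompt") || "a sunset over water"; // placeholder default
  const results = await getResults(prompt, 6); // getResults from the snippet above
  const html = renderToString(
    <div>
      {results.map((r, i) => <div key={i}>{JSON.stringify(r)}</div>)}
    </div>,
  );
  return new Response(html, { headers: { "Content-Type": "text/html" } });
}
```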
Generates a streaming illustrated primer on a subject. Enter a subject, click on titles to "delve" deeper.
🪩 To fork, [sign up for Substrate](https://substrate.run/signin) to get your own API key and $5
import inputHeader from "https://esm.town/v/substrate/inputHeader";
import { ComputeJSON, ComputeText, GenerateImage, sb, Substrate } from "npm:substrate";
import { z } from "npm:zod";
import { zodToJsonSchema } from "npm:zod-to-json-schema";
const input = new URL(req.url).searchParams.get("input") || "modernism";
// Substrate graph
const substrate = new Substrate({ apiKey: Deno.env.get("SUBSTRATE_API_KEY_R") });
const Topic = z
.object({
const image4 = new GenerateImage({ prompt: prompt4.future.text });
const nodes = [image1, caption1, image2, caption2, image3, caption3, image4, caption4];
const stream = await substrate.stream(...nodes);
// UI
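The prompt and caption nodes themselves sit between the matched lines; a rough sketch of one image/caption pair in the style of the lines above, using this val's imports (`input` is the subject from the query string; the prompt wording is an assumption):

```ts
// Hypothetical sketch of one primer section; the snippet above builds four of these.
const prompt1 = new ComputeText({
  prompt: sb.interpolate`Write a vivid one-sentence illustration prompt about ${input}.`,
});
const image1 = new GenerateImage({ prompt: prompt1.future.text });
const caption1 = new ComputeText({
  prompt: sb.interpolate`Write a short caption for an illustration described as: ${prompt1.future.text}`,
});
// Collect all sections and stream them to the client, as in the snippet:
// const nodes = [image1, caption1, /* ...image4, caption4 */];
// const stream = await substrate.stream(...nodes);
```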
Other matching vals: substrate/substrateBadge (Public), substrate/substrateBadgeMiddleware (Public).
