Avatar

weaverwhale

9 public vals
Joined April 12, 2024
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
import process from "node:process";
// Demo: build a one-step LangChain pipeline that names a company for a product.
// Exported as an already-running promise resolving to the chain's result.
export const langchainEx = (async () => {
  // Load the LangChain pieces lazily so the cost is only paid when this runs.
  const { OpenAI } = await import("https://esm.sh/langchain/llms/openai");
  const { PromptTemplate } = await import("https://esm.sh/langchain/prompts");
  const { LLMChain } = await import("https://esm.sh/langchain/chains");

  // LLM configured from the `openai` env var; high temperature for creative output.
  const llm = new OpenAI({
    temperature: 0.9,
    openAIApiKey: process.env.openai,
    maxTokens: 100,
  }, {});

  // Prompt with a single {product} slot, wired into a simple LLM chain.
  const namePrompt = new PromptTemplate({
    template: "What is a good name for a company that makes {product}?",
    inputVariables: ["product"],
  });
  const chain = new LLMChain({ llm, prompt: namePrompt });

  const result = await chain.call({ product: "colorful socks" });
  console.log(result);
  return result;
})();

GistGPT Client

Provide a RAW file URL from Github, BitBucket, GitLab, Val Town, etc. and GistGPT will provide you the gist of the code.

Client for https://www.val.town/v/weaverwhale/GistGPT

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
/** @jsxImportSource https://esm.sh/hono/jsx */
import { fetch } from "https://esm.town/v/std/fetch";
import { Marked } from "npm:@ts-stack/markdown";
import { Hono } from "npm:hono";
// Hono web app: a minimal HTML front-end for the GistGPT summarizer API.
const app = new Hono();
// GET / — landing page: a URL input plus two demo links. All interactivity
// lives in the inline <script> below (runtime string — left untouched).
// NOTE(review): the input's class attribute ends mid-token ("dark:f") — this
// looks like a truncated copy/paste; confirm against the original val.
app.get("/", (c) => {
return c.render(
`<html>
<head>
<title>GistGPT</title>
<script src="https://cdn.tailwindcss.com"></script>
</head>
<body class="p-4 max-w-lg mx-auto">
<h1 class="text-3xl font-bold mb-4">
GistGPT
</h1>
<p class="mb-4">Client for the <a class="underline" target="_blank" href="https://www.val.town/v/weaverwhale/GistGPT">GistGPT API</a></p>
<p class="mb-4">Provide a RAW file URL from Github, BitBucket, GitLab, Val Town, etc. and GistGPT will provide you the gist of the code.</p>
<input
class="bg-gray-50 border border-gray-300 text-gray-900 text-sm rounded-lg focus:ring-blue-500 focus:border-blue-500 block w-full p-2.5 dark:bg-gray-700 dark:border-gray-600 dark:placeholder-gray-400 dark:text-white dark:focus:ring-blue-500 dark:f
placeholder="Provide a raw gist link here"
/>
<p class="my-4">Have it summarize <a class="inline-block underline cursor-pointer" id="summarize-this">this file</a>, or <a class="inline-block underline cursor-pointer" id="summarize-api">the API's file</a> and it will explain how it works 🤯</p>
</body>
<script>
document.getElementById("summarize-api").addEventListener("click", (e) => {
e.target.disabled = true;
window.location.href = "/gist?url=https://esm.town/v/weaverwhale/GistGPT";
document.querySelector("input").value = "Summarizing the API\'s file...";
})
document.getElementById("summarize-this").addEventListener("click", (e) => {
e.target.disabled = true;
window.location.href = "/gist?url=https://esm.town/v/weaverwhale/GistGPT_Client";
document.querySelector("input").value = "Summarizing this file...";
})
document.querySelector("input").addEventListener("keyup", (e) => {
// if enter is pressed, submit the form
if (e.key === 'Enter' || e.keyCode === 13) {
e.preventDefault();
document.querySelector("input").disabled = true;
const v = document.querySelector("input").value
window.location.href = "/gist?url=" + encodeURIComponent(v);
document.querySelector("input").value = 'Summarizing \"' + v + '\"...';
}
})
</script>
</html>`,
);
});
// GET /gist?url=... — fetch the summary for `url` from the GistGPT API and
// render it (the API returns markdown) as a styled HTML page.
app.get("/gist", async (c) => {
  const url = c.req.query("url") ?? "";
  // Encode the target so query metacharacters in `url` (?, &, #) survive the
  // hop to the upstream API instead of being parsed as separate parameters.
  const answer = await fetch(
    `https://weaverwhale-gistgpt.web.val.run/gist?url=${encodeURIComponent(url)}`,
  ).then(
    (res) => res.text(),
  );
  // Escape the user-supplied URL before interpolating it into the page so it
  // cannot inject markup (reflected XSS).
  const safeUrl = url
    .replaceAll("&", "&amp;")
    .replaceAll("<", "&lt;")
    .replaceAll(">", "&gt;")
    .replaceAll('"', "&quot;");
  return c.render(
    `<html>
<head>
<title>GistGPT</title>
<script src="https://cdn.tailwindcss.com"></script>
<style>
p, li { margin-bottom: 1em !important; }
code { background: black; color: white; }
</style>
</head>
<body class="p-4 max-w-lg mx-auto">
<h1 class="text-3xl font-bold mb-4">
GistGPT
</h1>
<h2 class="text-2xl font-bold mb-4">
The gist of your code is...
</h2>
<p style="word-wrap: break-word; white-space: pre-wrap;">${Marked.parse(answer)}</p>
<p class="mt-4">This was based on the provided URL: <a target="_blank" class="underline inline-block" href="${safeUrl}">${safeUrl}</a></p>
</body>
</html>`,
  );
});
export default app.fetch;

GistGPT

A helpful assistant who provides the gist of a gist

How to use

/ and /gist - The default response explains this file itself — effectively real-time recursion, since the val summarizes its own source.

/gist?url={URL} - Provide a RAW file URL from Github, BitBucket, GitLab, Val Town, etc. and GistGPT will provide you the gist of the code.

/about - "Tell me a little bit about yourself"

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
import { fetch } from "https://esm.town/v/std/fetch";
import { Hono } from "npm:hono";
import { OpenAI } from "npm:openai";
const gistGPT = async (input: string, about?: boolean) => {
// if about, use the input as the prompt
// if not, use the input as a code file URL, and fetch the content
const chatInput = about ? input : await (await fetch(input)).text();
const openai = new OpenAI();
let chatCompletion = await openai.chat.completions.create({
messages: [
{
role: "system",
content: `
You are a helpful assistant for a senior software developer.
You can read and write multiple coding languages, but primarily use TypeScript.
Your goal is to accept snippets of code, and return a summary of it.
`.replaceAll("\n", ""),
},
about
? {
role: "system",
content: `
If anyone asks you about yourself, pretend you are a senior software developer.
Don't ask how you can assist; just tell me a little bit about yourself.
`.replaceAll("\n", ""),
}
: {
role: "system",
content: `
Based on the provided code snippet, summarize it in as much detail as possible.
Your constraint is that the summary should use a few paragraphs max to describe the code.
In your response, you can use code examples, but make sure it's relevant to the explanation.
Format your response as markdown.
Include helpful links when they are available.
This is for my job, so please don't include any personal information.
Remember, you are a senior software developer.
Don't ask how you can assist; just do the best you can.
`.replaceAll("\n", ""),
},
{
role: "user",
content: JSON.stringify(chatInput),
},
],
// model: "gpt-4-1106-preview",
model: "gpt-3.5-turbo",
max_tokens: 4000,
temperature: 0,
});
return chatCompletion.choices[0].message.content;
};
// Default target is this val's own source, so the bare routes summarize themselves.
const defaultGistUrl = "https://esm.town/v/weaverwhale/GistGPT";
const app = new Hono();
// GET / — no URL supplied: summarize this val's own source.
app.get("/", async (c) => {
  return c.text(await gistGPT(defaultGistUrl));
});
// GET /gist?url=... — summarize the code at the given RAW file URL.
app.get("/gist", async (c) => {
  // decodeURIComponent, not decodeURI: clients encode the URL with
  // encodeURIComponent, and decodeURI leaves %3A, %2F, %3F etc. encoded,
  // producing a broken URL for the fetch inside gistGPT.
  const url = decodeURIComponent(c.req.query("url") || defaultGistUrl);
  return c.text(await gistGPT(url));
});
// GET /about — have the assistant introduce itself.
app.get("/about", async (c) => {
  return c.text(await gistGPT("Tell me a little bit about yourself", true));
});
export default app.fetch;
1
2
3
4
5
6
7
8
import isMyWebsiteDown from "https://esm.town/v/weaverwhale/isMyWebsiteDown";
// Scheduled (interval) val: probe the Triple Whale app and log/return the result.
export default async function(interval: Interval) {
  const target = "https://app.triplewhale.com";
  const status = await isMyWebsiteDown(target);
  console.log(status);
  return status;
}
1
2
3
4
5
6
import isMyWebsiteDown from "https://esm.town/v/weaverwhale/isMyWebsiteDown";
// One-off check: probe the Triple Whale app and export the result.
const data = await isMyWebsiteDown("https://app.triplewhale.com");
console.log(data);
// NOTE(review): the isMyWebsiteDown version visible in this file has no
// explicit return value, so `data` is likely undefined — confirm against
// the imported val before relying on this export.
export default data;
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
import { email } from "https://esm.town/v/std/email?v=11";
import { fetch } from "https://esm.town/v/std/fetch";
/**
 * Probe `url` and send an email alert when it appears down.
 *
 * "Up" means any successful (2xx) response — the previous `status !== 200`
 * check raised false alarms on healthy 204/206 responses. The parameter is
 * renamed from `URL` (which shadowed the global URL constructor) and typed.
 *
 * @param url The URL to probe.
 * @returns `{ ok, reason }` so callers that log the result see something
 *          useful (previously the function returned undefined).
 */
export default async (url: string) => {
  const [date, time] = new Date().toISOString().split("T");
  let ok = true;
  let reason: string | undefined;
  try {
    const res = await fetch(url);
    // res.ok covers the full 2xx range, not just 200.
    if (!res.ok) {
      reason = `(status code: ${res.status})`;
      ok = false;
    }
  } catch (e) {
    reason = `couldn't fetch: ${e}`;
    ok = false;
  }
  if (ok) {
    console.log(`Website up (${url})`);
  } else {
    const subject = `Website down (${url})`;
    const text = `At ${date} ${time} (UTC), ${url} was down (reason: ${reason}).`;
    console.log(subject);
    console.log(text);
    await email({ subject, text });
  }
  return { ok, reason };
};
1
2
3
4
import { chat } from "https://esm.town/v/weaverwhale/chat";
// Demo: ask the chat helper for a story and print the reply's text content.
const { content } = await chat("Tell me a story");
console.log(content);

OpenAI ChatGPT helper function

This val uses your OpenAI token if you have one, and the @std/openai if not, so it provides limited OpenAI usage for free.

Example (simple prompt): import { chat } from "https://esm.town/v/stevekrouse/openai"; const { content } = await chat("Hello, GPT!"); console.log(content);
Example (message array with options): import { chat } from "https://esm.town/v/stevekrouse/openai"; const { content } = await chat( [ { role: "system", content: "You are Alan Kay" }, { role: "user", content: "What is the real computer revolution?"} ], { max_tokens: 50, model: "gpt-4" } ); console.log(content);
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
import type { ChatCompletion, ChatCompletionCreateParamsNonStreaming, Message } from "npm:@types/openai";
// Pick an OpenAI client: the official npm package when an API key is
// configured, otherwise Val Town's free std-library wrapper.
async function getOpenAI() {
  const hasKey = Deno.env.get("OPENAI_API_KEY") !== undefined;
  const { OpenAI } = hasKey
    ? await import("npm:openai")
    : await import("https://esm.town/v/std/openai");
  return new OpenAI();
}
/**
 * Send a chat request to OpenAI and return the completion together with the
 * first choice's text content.
 *
 * Accepts either a bare string (wrapped as a single user message) or a full
 * Message[] conversation, and works with any supported GPT model.
 *
 * @param {string | Message[]} input - A user prompt string, or an array of chat message objects.
 * @param {object} options - Completion parameters merged over the defaults (gpt-3.5-turbo, max_tokens 30); `messages` cannot be overridden.
 * @returns {Promise<string>} A promise resolving to the raw completion, extended with `content` from the first choice.
 */
export async function chat(
  input: string | Message[],
  options?: Omit<ChatCompletionCreateParamsNonStreaming, "messages">,
): Promise<ChatCompletion & { content: string }> {
  const openai = await getOpenAI();
  // A plain string becomes a one-message conversation from the user.
  const conversation = typeof input === "string"
    ? [{ role: "user", content: input }]
    : input;
  const completion = await openai.chat.completions.create({
    max_tokens: 30,
    model: "gpt-3.5-turbo",
    ...(options ?? {}),
    messages: conversation,
  });
  return { ...completion, content: completion.choices[0].message.content };
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
import { API_URL } from "https://esm.town/v/std/API_URL";
/**
 * Call the Val Town REST API.
 *
 * @param path Path appended to API_URL (should start with "/").
 * @param options Standard fetch options plus:
 *   - authenticated: attach `Bearer ${Deno.env.get("valtown")}`.
 *   - paginate: follow `links.next` and collect every page's `data` array.
 * @returns Parsed JSON body; `{ data: [...] }` when paginating; null for an
 *          empty body.
 * @throws Error containing the response text on any non-2xx response.
 */
export async function api<T = any>(
  path: string,
  options?: RequestInit & {
    authenticated?: boolean;
    paginate?: boolean;
  },
): Promise<T> {
  const authorization = options?.authenticated ? `Bearer ${Deno.env.get("valtown")}` : undefined;
  // Build headers once so paginated requests carry Authorization too —
  // previously the paginate branch dropped it entirely. Only include the
  // header when set, so we never send a literal "undefined" value.
  const headers = {
    ...options?.headers,
    ...(authorization ? { Authorization: authorization } : {}),
  };
  if (options?.paginate) {
    const data: unknown[] = [];
    let url = new URL(`${API_URL}${path}`);
    url.searchParams.set("limit", "100");
    while (true) {
      const resp = await fetch(url, { headers });
      if (!resp.ok) {
        throw new Error(await resp.text());
      }
      const res = await resp.json();
      data.push(...res.data);
      if (!res.links.next) {
        break;
      }
      url = new URL(res.links.next);
    }
    return { data } as T;
  }
  const resp = await fetch(`${API_URL}${path}`, {
    ...options,
    headers,
  });
  const text = await resp.text();
  if (!resp.ok) {
    throw new Error(text);
  }
  if (!text) {
    return null;
  }
  return JSON.parse(text);
}
Next