Search results

Algo
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
import { email } from "https://esm.town/v/std/email?v=9";
import { fetch } from "https://esm.town/v/std/fetch";
// Scrapes the free-market USD/TRY table from doviz.com and emails it.
export async function exchangeRate() {
  const cheerio = await import("npm:cheerio");
  const response = await fetch(
    "https://kur.doviz.com/serbest-piyasa/amerikan-dolari",
  );
  const pageHtml = await response.text();
  const $ = cheerio.load(pageHtml);
  // The rate figures live inside the page's ".value-table" element.
  const rateTable = $(".value-table");
  await email({
    html: rateTable.html(),
    subject: "TRY/USD exchange rate alert from val.town",
  });
  console.log("email sent!");
}
1
2
3
4
5
6
7
8
9
10
11
12
import { fetchText } from "https://esm.town/v/stevekrouse/fetchText?v=5";
// Fetches the Wikipedia article on OpenAI and returns its intro paragraph.
export const webscrapeWikipediaIntro = (async () => {
  const { load } = await import("npm:cheerio");
  const articleHtml = await fetchText(
    "https://en.wikipedia.org/wiki/OpenAI",
  );
  const $ = load(articleHtml);
  // The second <p> on the page holds the article's intro text.
  return $("p:nth-of-type(2)").first().text();
})();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
import process from "node:process";
// fork by https://esm.town/v/webup/getWebLoaderBuilder;
/**
 * Builds a zero-argument factory that resolves a LangChain document loader
 * for the given URL. Each loader's dependencies are imported lazily, only
 * when the returned factory is actually invoked.
 *
 * @param url Resource to load documents from.
 * @param type Loader family to use; defaults to "webpage".
 * @param options Extra options forwarded to the github/audio loaders.
 */
export async function getWebLoaderBuilder(
  url: string,
  type: "webpage" | "github" | "audio" = "webpage",
  options?: any,
) {
  const { cond, matches } = await import("npm:lodash-es");
  // lodash `cond` invokes the handler of the first predicate matching { type }.
  const pickLoader = cond([
    [
      matches({ type: "webpage" }),
      async () => {
        await import("npm:cheerio");
        const { CheerioWebBaseLoader } = await import(
          "npm:langchain/document_loaders/web/cheerio"
        );
        return new CheerioWebBaseLoader(url);
      },
    ],
    [
      matches({ type: "github" }),
      async () => {
        await import("npm:ignore");
        const { GithubRepoLoader } = await import(
          "npm:langchain/document_loaders/web/github"
        );
        return new GithubRepoLoader(url, options);
      },
    ],
    [
      matches({ type: "audio" }),
      async () => {
        const { AudioTranscriptLoader } = await import(
          "npm:langchain/document_loaders/web/assemblyai"
        );
        // AssemblyAI key is read from the ASSEMBLYAI env var.
        return new AudioTranscriptLoader(
          { audio_url: url, ...options },
          { apiKey: process.env.ASSEMBLYAI },
        );
      },
    ],
  ]);
  return () => pickLoader({ type });
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
import { fetch } from "https://esm.town/v/std/fetch";
// Scrapes tide times for Dublin (North Wall) from worldtides.info.
// Returns an array of { tide, time, height } rows, where `height` is the
// numeric metres value extracted from the third table column.
export const getTideTimes = (async () => {
  const cheerio = await import("npm:cheerio");
  const response = await fetch(
    "https://www.worldtides.info/tidestations/Europe/Ireland/DUBLIN_(NORTH_WALL)",
  );
  const body = await response.text();
  const $ = cheerio.load(body);
  const tableRows = $("table.table.table-bordered tbody tr");
  // Hoisted out of the loop; matches e.g. "1.2 m" and captures "1.2".
  const heightRegex = /([\d.]+)\s*m/;
  const tideTimes = [];
  // Starts at 1 — presumably skipping a header row rendered inside <tbody>;
  // verify against the live page markup.
  for (let i = 1; i < tableRows.length; i++) {
    const row = tableRows[i];
    const tide = $(row).find("td:nth-child(1)").text().trim();
    const time = $(row).find("td:nth-child(2)").text().trim();
    const heightStr = $(row).find("td:nth-child(3)").text().trim();
    const match = heightRegex.exec(heightStr);
    // Bug fix: use capture group 1 (the number alone) rather than match[0]
    // (which includes the trailing "m"), and fall back to the raw cell text
    // instead of crashing on a null match.
    const heightInMeters = match ? match[1] : heightStr;
    tideTimes.push({ tide, time, height: heightInMeters });
  }
  return tideTimes;
});
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
import { fetch } from "https://esm.town/v/std/fetch";
import process from "node:process";
export const runAgent = (async () => {
const { z } = await import("npm:zod");
const { ChatOpenAI } = await import("npm:langchain/chat_models/openai");
const { ChatAnthropic } = await import("npm:langchain/chat_models/anthropic");
const { DynamicTool, Tool, SerpAPI } = await import("npm:langchain/tools");
const { initializeAgentExecutorWithOptions } = await import(
"npm:langchain/agents"
);
const cheerio = await import("npm:cheerio");
const { LLMChain } = await import("npm:langchain/chains");
const { ChatPromptTemplate, HumanMessagePromptTemplate } = await import(
"npm:langchain/prompts"
);
const { StructuredOutputParser, OutputFixingParser } = await import(
"npm:langchain/output_parsers"
);
const model = new ChatOpenAI({
openAIApiKey: process.env.OPENAI_API_KEY,
modelName: "gpt-4",
maxTokens: 2048,
});
const anthropicModel = new ChatAnthropic({
modelName: "claude-v1",
anthropicApiKey: process.env.ANTHROPIC_API_KEY,
temperature: 0,
});
// I had an idea where the agent could scrape individual PR pages, didn't implement
const outputParser = StructuredOutputParser.fromZodSchema(z.array(
z.object({
contributor: z.string().describe(
"The name of the main contributor of the PR",
),
description: z.string().describe(
"A description of what the pull request is for",
),
isFirstContribution: z.boolean().describe(
"Whether it is the contributor's first contribution",
),
pullRequestNumber: z.number().describe("The number of the pull request"),
}).describe("An objects representing a pull request"),
));
const outputFixingParser = OutputFixingParser.fromLLM(model, outputParser);
const tools = [
new DynamicTool({
name: "langchain-release-summarizer",
description:
"Extracts information about the pull requests merged as part of a LangChain release. Takes a GitHub URL as input.",
func: async (input, runManager) => {
const response = await fetch(input.trim());
const pageContent = await response.text();
const $ = cheerio.load(pageContent);
const releaseNotes = $("#repo-content-pjax-container").text();
const prExtractionChain = new LLMChain({
llm: anthropicModel,
prompt: ChatPromptTemplate.fromPromptMessages([
HumanMessagePromptTemplate.fromTemplate(`{query}\n\n{pageContent}`),
]),
outputParser: outputFixingParser,
outputKey: "pullRequests",
});
const summarizationResult = await prExtractionChain.call({
query:
`The following webpage contains the release notes for LangChain, an open source framework for building apps with LLMs.
List all of the pull requests mentioned in the release notes.
Extract the name of the main contributor, a description of the pull request, whether it is their first contribution, and the number of the pull request.
Be extremely verbose!`,
pageContent: releaseNotes,
}, runManager?.getChild());
return JSON.stringify(summarizationResult.pullRequests);
},
}),
new SerpAPI(process.env.SERPAPI_API_KEY, {
location: "Austin,Texas,United States",
hl: "en",
gl: "us",
}),
];
const agent = await initializeAgentExecutorWithOptions(tools, model, {
agentType: "chat-conversational-react-description",
});
const result = await agent.call({
input: `Generate a Twitter thread announcing a new LangChain release.
The release notes are available at this URL: https://github.com/hwchase17/langchainjs/releases/tag/0.0.84.
The release notes include a short description of each merged pull request and the contributor who built the feature.
The thread must start with a header tweet summarizing all the changes in the release.
The thread must contain a tweet for each pull request merged as part of the release that adds a significant feature, and must go into deep detail about what the contribution adds.
If you don't know what something mentioned in the release notes is, look it up with the provided tool so that you can get full context.
Each tweet should also thank the contributor by name, and congratulate them if it is their first contribution and put a medal emoji 🥇 next to their name.
Try to avoid repetitive language in your tweets.
Be extremely verbose and descriptive in your final response.
Below is an example of the format that tweets in the final output Twitter thread should follow. Individual tweets should be separated by a "-----" sequence:
-----Header tweet-----
@LangChainAI 🦜🔗 JS/TS 0.0.83 out with loads of @GoogleAI and PaLM!
💬 Google Vertex AI chat model + embeddings

cheerio

cheerio is a popular npm module that makes it easy to parse and manipulate HTML and XML.

cheerio is modeled after a reduced version of jQuery. Note that it's pretty different from default browser DOM methods, so you can't call things like .appendChild or document.createElement when using cheerio: it's not based on a full-fledged DOM or browser implementation. But, in exchange it's a lot faster and simpler.

Readme
1
2
3
4
5
6
7
8
// Minimal cheerio demo: parse an HTML fragment and read the <span>'s text.
export let cheerioExample = (async () => {
  const { default: $ } = await import("npm:cheerio");
  const markup = `<article>
<div>Hello</div>
<span>World</span>
</article>`;
  const fragment = $(markup);
  return fragment.find("span").text();
})();

Getting all boxes from MinhaBibliotecaCatolica.com.br

Readme
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
import { fetchText } from "https://esm.town/v/stevekrouse/fetchText?v=5";
// Scrapes past subscription boxes from MinhaBibliotecaCatolica.com.br and
// returns them sorted by box number.
export const webscrapeMinhaBibliotecaCatolicaBoxList = (async () => {
  const sourceUrl =
    "https://assine.bibliotecacatolica.com.br/edicoes-anteriores";
  const { default: cheerio } = await import("npm:cheerio");
  const html = await fetchText(sourceUrl);
  // (Fix: dropped leftover debug `console.log(cheerio)`.)
  const $ = cheerio.load(html);
  const result = [];
  $(".top_card").each((_, topCard) => {
    // Card label — presumably of the form "<Month> Box <n>"; verify on site.
    const box = $(".tag_card span", topCard).first().text().trim();
    // Fix: run each pattern once instead of re-exec'ing per field, and skip
    // cards whose label doesn't match rather than crashing on a null match.
    const numberMatch = /Box\s+(\d+)/m.exec(box);
    const monthMatch = /^(\w+)/m.exec(box);
    const titleMatch = /(Box \d+)/m.exec(box);
    if (!numberMatch || !monthMatch || !titleMatch) return;
    const number = parseInt(numberMatch[1], 10);
    const boxInfo = {
      number,
      month: monthMatch[1],
      // NOTE(review): assumes boxes started in 2018 at twelve per year —
      // confirm against the source data.
      year: 2018 + Math.trunc(number / 12),
      title: titleMatch[1],
    };
    result.push({
      title: $(".title_card", topCard).first().text().trim(),
      boxInfo,
      // Selector spelling ("tumbnail") matches the site's own class name.
      thumbnailUrl: $(".tumbnail_card", topCard).attr("src"),
      description: $(".description_card", topCard).first().text().trim(),
    });
  });
  // Ascending by box number.
  return result.sort((a, b) => a.boxInfo.number - b.boxInfo.number);
})();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
import { fetch } from "https://esm.town/v/std/fetch";
let { interactiveBrokersMarginsState } = await import("https://esm.town/v/simone/interactiveBrokersMarginsState");
// Scrapes Interactive Brokers futures-margin tables, diffs them against the
// previously stored snapshot, emails the diff, and stores the new snapshot.
export async function interactiveBrokersMargins() {
  // Pairs elements index-wise: zip([a, b], [1, 2]) -> [[a, 1], [b, 2]].
  function zip(...arrays) {
    const result = [];
    for (let i = 0; i < arrays[0].length; i++) {
      result.push(arrays.map((a) => a[i]));
    }
    return result;
  }
  // Returns { [exchangeName]: { [tradingClass]: rowObject } }.
  async function loadInteractiveBrokersData() {
    const cheerio = await import("npm:cheerio");
    const response = await fetch(
      "https://www.interactivebrokers.com/en/trading/margin-futures-fops.php"
    );
    const html = await response.text();
    const $ = cheerio.load(html);
    const exchanges = $("div.show_table, div.hide_table")
      .toArray()
      .map((div) => {
        const exchangeName = $("h5", div).text();
        const table = $("table", div);
        const headings = $("thead tr th", table)
          .map((_, h) => $(h).text())
          .toArray();
        const rows = $("tbody tr", table)
          .toArray()
          .map((row) =>
            $("td", row)
              .map((_, cell) => $(cell).text())
              .toArray()
          );
        // Key each row object by its "Trading Class" column.
        // Fix: Object.fromEntries replaces the original spread-in-reduce,
        // which rebuilt the accumulator on every row (accidental O(n^2)).
        const data = Object.fromEntries(
          rows.map((row) => {
            const entry = Object.fromEntries(zip(headings, row));
            return [entry["Trading Class"], entry];
          }),
        );
        return { exchangeName, data };
      });
    return Object.fromEntries(
      exchanges.map(({ exchangeName, data }) => [exchangeName, data]),
    );
  }
  const { detailedDiff } = await import("npm:deep-object-diff");
  const newMargins = await loadInteractiveBrokersData();
  const diff = detailedDiff(
    interactiveBrokersMarginsState,
    newMargins
  );
  // NOTE(review): reassigning the imported binding only updates the local
  // copy here — presumably val.town persists this state; confirm.
  interactiveBrokersMarginsState = newMargins;
  console.email(diff);
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
import { fetchText } from "https://esm.town/v/stevekrouse/fetchText?v=5";
// Scrapes the counter labels/values from a TikTok profile's first <h3>.
export const thedyslexicdeveloperTikTok = (async () => {
  const cheerio = await import("npm:cheerio");
  const profileHtml = await fetchText(
    `https://www.tiktok.com/@thedyslexicdeveloper`,
  );
  const $ = cheerio.load(profileHtml);
  // Split the heading text into alternating non-digit / digit runs.
  const counterText = $("h3").first().prop("innerText");
  const data = counterText.match(/[^\d]+|\d+/g);
  return `
${data[0]} - ${data[1]}
${data[2]} - ${data[3]}
${data[4]} - ${data[5]}
`;
})();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
import { fetch } from "https://esm.town/v/std/fetch";
let { tradestationMarginsState } = await import("https://esm.town/v/simone/tradestationMarginsState");
// Scrapes TradeStation futures-margin requirements, diffs them against the
// previously stored snapshot, emails the diff, and stores the new snapshot.
export async function tradestationMargins() {
  // Pairs elements index-wise: zip([a, b], [1, 2]) -> [[a, 1], [b, 2]].
  function zip(...arrays) {
    const result = [];
    for (let i = 0; i < arrays[0].length; i++) {
      result.push(arrays.map((a) => a[i]));
    }
    return result;
  }
  // Returns { [symbolRoot]: rowObject } for the margin table.
  async function loadTradestationData() {
    const cheerio = await import("npm:cheerio");
    const response = await fetch(
      "https://www.tradestation.com/pricing/futures-margin-requirements/"
    );
    const html = await response.text();
    const $ = cheerio.load(html);
    const table = $("table.table");
    const headings = $("thead > tr > th", table)
      .map((i, e) => $(e).text())
      .get();
    const rows = $("tbody > tr", table)
      .toArray()
      .map((row) =>
        $("td", row)
          .map((i, e) => $(e).text())
          .toArray()
      )
      .filter((cells) => cells.length)
      .map((cells) => Object.fromEntries(zip(headings, cells)));
    // Key rows by "Symbol Root". Fix: Object.fromEntries replaces the
    // original spread-in-reduce, which rebuilt the accumulator on every
    // row (accidental O(n^2)).
    return Object.fromEntries(rows.map((r) => [r["Symbol Root"], r]));
  }
  const { detailedDiff } = await import("npm:deep-object-diff");
  const newMargins = await loadTradestationData();
  const diff = detailedDiff(tradestationMarginsState, newMargins);
  // NOTE(review): reassigning the imported binding only updates the local
  // copy here — presumably val.town persists this state; confirm.
  tradestationMarginsState = newMargins;
  console.email(diff);
}
1
2
3
4
5
6
7
8
9
10
11
12
import { fetch } from "https://esm.town/v/std/fetch";
// Returns the title of the current top story on Hacker News.
export let nameTopHNThread = (async () => {
  // Cheerio parses markup and provides a jQuery-like API over the result.
  const cheerio = await import("npm:cheerio");
  const reply = await fetch("https://news.ycombinator.com/");
  const replyText = await reply.text();
  // Fix: cheerio.load() is synchronous — the original pointlessly awaited
  // its (non-Promise) return value.
  const $ = cheerio.load(replyText);
  const firstThreadTitle = $(".athing:first-child .titleline > a").first()
    .text();
  return firstThreadTitle;
})();
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
import { fetch } from "https://esm.town/v/std/fetch";
import { set } from "https://esm.town/v/std/set?v=11";
import { topHNThreadByHour } from "https://esm.town/v/elsif_maj/topHNThreadByHour";
// Cron-style val: records the current top HN story title for this hour and
// persists the accumulated list under "topHNThreadByHour".
export let nameTopHNThreadCron = (async () => {
  // Cheerio parses markup and provides a jQuery-like API over the result.
  const cheerio = await import("npm:cheerio");
  const reply = await fetch("https://news.ycombinator.com/");
  const replyText = await reply.text();
  // Fix: cheerio.load() is synchronous — no await needed on its result.
  const $ = cheerio.load(replyText);
  const firstThreadTitle = $(".athing:first-child .titleline > a").first()
    .text();
  const now = new Date();
  const currentHour = now.getHours();
  topHNThreadByHour.push(
    "Top thread on Hackernews for " + currentHour + ":00 " +
      "is: " + firstThreadTitle,
  );
  await set(
    "topHNThreadByHour",
    topHNThreadByHour,
  );
});
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
import { fetch } from "https://esm.town/v/std/fetch";
interface IDetails {
  title: string;
  price: number;
  url: string;
}

/**
 * Searches njuskalo.hr for the given keywords, sorted by newest, and
 * returns details of the first ad found.
 *
 * @param search Keywords joined (URL-encoded spaces) into the query.
 * @returns The first result's details, or undefined if cheerio fails to load.
 */
export const njuskaloSniff = async (search: string[] = []): Promise<IDetails | undefined> => {
  const cheerio = await import("npm:cheerio");
  const keywords = search.join("%20");
  const searchUrl =
    `https://www.njuskalo.hr/?ctl=search_ads&keywords=${keywords}&sort=new`;
  const v = await fetch(searchUrl);
  const txt = await v.text();
  const $ = cheerio?.default?.load?.(txt);
  if (!$)
    return undefined;
  // First entry of the primary results list.
  const entryTxt =
    ".wrap-content-primary:first-of-type .content-main .EntityList-items:first-of-type .EntityList-item:first-of-type ";
  const title = $(entryTxt + "h3.entity-title:first-of-type a.link").text();
  const url = $(entryTxt + "h3.entity-title:first-of-type a.link").attr("href");
  // Strip all whitespace, cut at the euro sign, switch "," to "." for Number().
  // (Fix: dropped the redundant .replace("\n", "") — /\s/g already removes
  // newlines.)
  // NOTE(review): only the first "," is replaced; a thousands separator in
  // the price text would still yield NaN — confirm the site's price format.
  const price = $(entryTxt + ".entity-prices .price-item:first-of-type .price")
    .text()
    .replace(/\s/g, "")
    .split("€")[0]
    .replace(",", ".");
  return { title, url, price: Number(price) };
};
1
2
3
4
5
6
7
8
9
10
11
12
import { fetchText } from "https://esm.town/v/stevekrouse/fetchText?v=5";
// Grabs the second paragraph of the OpenAI Wikipedia article as plain text.
export const webscrapeWikipediaIntro = (async () => {
  const cheerio = await import("npm:cheerio");
  const pageMarkup = await fetchText(
    "https://en.wikipedia.org/wiki/OpenAI",
  );
  const $ = cheerio.load(pageMarkup);
  // CSS selector for the second <p>, which carries the intro text.
  const introText = $("p:nth-of-type(2)").first().text();
  return introText;
})();
1
2
3
4
5
6
// Loads cheerio and lodash-es and hands both modules back to the caller.
export const nbbaierImports = async () => {
  const cheerioModule = await import("npm:cheerio");
  const lodashModule = await import("npm:lodash-es");
  return { cheerio: cheerioModule, _: lodashModule };
};
// Forked from @nbbaier.imports