Back to APIs list

Reddit API examples & templates

Use these vals as a playground to view and fork Reddit API examples and templates on Val Town. Run any example below or find templates that can be used as a pre-built solution.
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import { dataToRSS } from "https://esm.town/v/Glench/dataToRSS";
import { fetch } from "https://esm.town/v/std/fetch";
/**
 * Fetch the newest r/googlephotos posts and keep only those whose title
 * mentions deletion, removal, or "free" (freeing up space).
 * Returns an empty list when the listing request fails.
 */
async function getOfficialRedditPosts() {
  const resp = await fetch("https://www.reddit.com/r/googlephotos/.json");
  if (!resp.ok) return [];
  const listing = await resp.json();
  const keywords = ["delet", "remov", "free"];
  return listing.data.children.filter((post) => {
    const title = post.data.title.toLowerCase();
    return keywords.some((keyword) => title.includes(keyword));
  });
}
// Scrape Google's Photos support forum thread listing and return threads that
// look like complaints about deleting photos, shaped like Reddit posts
// ({ data: { created, url, title, selfText } }) so callers can merge both
// sources uniformly.
// NOTE(review): parses HTML via raw string splits; any markup change on
// Google's side makes a .split(...)[1] or .match(...) access throw.
async function getGoogleSupportForumPosts() {
const resp = await fetch(
"https://support.google.com/photos/threads?hl=en&max_results=500",
);
const text = await resp.text();
// looking for " <a class="thread-list-thread" data-stats-id="247061455" data-stats-ve="87" href="/photos/thread/247061455/how-know-my-photo-record-deleted?hl=en" aria-label=" thread titled How know my photo record deleted FIX_PROBLEM Yas 0 Replies
// Everything before the first thread marker is page chrome; drop it.
const [_, ...threads_text] = text.split(/class="thread-list-thread"/);
return threads_text.map(x => {
// First digit run in the chunk is the thread id; it is also reused as a
// pseudo-timestamp in the `created` field below.
const id = x.match(/\d+/)[0];
const url = `https://support.google.com${x.split("href=\"")[1].split("\"")[0]}`;
const title = x.split("thread-list-thread__title\">")[1].split("<span")[0].trim() + " (Google Support Forum)";
const selfText = x.split("\"thread-list-thread__snippet\">")[1].split("</span>")[0];
return { data: { created: id, url, title, selfText } };
}).filter(thread_obj => {
const title = thread_obj.data.title.toLowerCase();
// Keep "delet*" titles but drop recovery/restore/trash/backup threads.
// NOTE(review): requiring "delet" while excluding "deleted" means only
// forms like "delete"/"deleting" survive — confirm that is intentional.
return title.includes("delet") && !(
title.includes("recover")
|| title.includes("restor")
|| title.includes("perma")
|| title.includes("trash")
|| title.includes("back")
|| title.includes("deleted")
|| title.includes("mistak")
);
});
}
/**
 * HTTP handler producing an RSS feed of marketing leads for the
 * "Delete All Google Photos" extension, sourced from r/googlephotos.
 */
export const googlePhotosMarketingRSS = async (x) => {
  const redditPosts = await getOfficialRedditPosts();
  // The Google support forum source was disabled by its author.
  const forumPosts = []; // await getGoogleSupportForumPosts(); // no signal anymore
  const allPosts = [...redditPosts, ...forumPosts];
  const rss = dataToRSS(allPosts, {
    title: "Delete All Google Photos Marketing RSS",
    description:
      "For marketing Delete All Google Photos Extension. Scrapes r/GooglePhotos and Google's official support forum.",
    link: "https://www.reddit.com/r/googlephotos/",
    item: {
      title: (post) => post.data.title,
      description: (post) => post.data.selfText,
      link: (post) => post.data.url,
      pubDate: (post) => post.data.created,
    },
  });
  return new Response(rss);
};
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
import { fetch } from "https://esm.town/v/std/fetch";
import { Buffer } from "node:buffer";
import process from "node:process";
// The account's 2FA needs to be disabled.
/**
 * Exchange a Reddit username/password for an OAuth access token via the
 * "password" grant.
 *
 * Reads env vars: reddit_username, reddit_password, reddit_client_id,
 * reddit_client_secret. The account's 2FA needs to be disabled for this
 * grant type.
 *
 * @returns the bearer token string.
 * @throws Error when the token endpoint returns a non-2xx status or the
 *   response carries no access_token (previously this silently returned
 *   `undefined`, which surfaced later as a confusing 401 at the call site).
 */
export async function getRedditAccessToken() {
  const formData = new URLSearchParams();
  formData.append("grant_type", "password");
  formData.append("username", process.env.reddit_username);
  formData.append("password", process.env.reddit_password);
  // HTTP Basic auth with the app's client id/secret identifies the OAuth client.
  const authorization = "Basic "
    + Buffer.from(
      `${process.env.reddit_client_id}:${process.env.reddit_client_secret}`,
    ).toString("base64");
  const response = await fetch("https://www.reddit.com/api/v1/access_token", {
    method: "POST",
    body: formData,
    headers: {
      "Authorization": authorization,
      "Content-Type": "application/x-www-form-urlencoded",
    },
  });
  if (!response.ok) {
    throw new Error(
      `Reddit token request failed: ${response.status} ${await response.text()}`,
    );
  }
  const body = await response.json();
  if (!body.access_token) {
    // Reddit reports auth errors (e.g. bad credentials, 2FA enabled) with a
    // 200 status and an "error" field, so check the payload too.
    throw new Error(`Reddit token response missing access_token: ${JSON.stringify(body)}`);
  }
  return body.access_token;
}
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
import { fetch } from "https://esm.town/v/std/fetch";
import { Buffer } from "node:buffer";
/**
 * Complete Reddit's OAuth "authorization_code" flow: trade the code from the
 * user's redirect for a token response.
 *
 * @returns the parsed token response body, or undefined if the HTTP request
 *   itself could not be made (the error is logged).
 */
export const authenticateReddit = async (
  CLIENT_ID,
  CLIENT_SECRET,
  ACCESS_CODE,
  REDIRECT_URI,
) => {
  const credentials = Buffer.from(`${CLIENT_ID}:${CLIENT_SECRET}`).toString("base64");
  const headers = {
    "Authorization": `Basic ${credentials}`,
    "Content-Type": "application/x-www-form-urlencoded",
  };
  const formData = new URLSearchParams();
  formData.append("grant_type", "authorization_code");
  formData.append("code", ACCESS_CODE);
  formData.append("redirect_uri", REDIRECT_URI);
  let response;
  try {
    response = await fetch("https://www.reddit.com/api/v1/access_token", {
      headers: headers,
      method: "POST",
      body: formData,
    });
  } catch (e) {
    console.error(e);
    return;
  }
  return await response.json();
};
1
2
3
4
5
6
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
// Latest posts from r/aww (cute animals), fetched as a JSON listing.
export let subredditExample = fetchJSON(
  "https://www.reddit.com/r/aww/.json",
);

This allows copying specific comments from Reddit to a Lemmy thread.

Might be super specific to my use case, but we use this for example to copy over some content from the r/SpaceX Starship thread to the one on the Lemmy community (with their blessing).

1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
import { set } from "https://esm.town/v/std/set?v=11";
import { spacexLemmyDb } from "https://esm.town/v/pdebie/spacexLemmyDb";
import { fetchRss } from "https://esm.town/v/pdebie/fetchRss";
/**
 * Mirror recent comments from a Reddit comment RSS feed into a Lemmy post:
 * create a Lemmy comment for each new Reddit comment and edit previously
 * synced ones whose content changed. Sync state (reddit comment id ->
 * { lemmyId, contentHash }) lives in spacexLemmyDb.commentMap and is
 * persisted via `set` at the end.
 *
 * @param instance Lemmy instance hostname (no scheme).
 * @param redditUrl RSS feed URL for the Reddit thread's comments.
 * @param postId Lemmy post id the synced comments attach to.
 * @param auth Lemmy auth token.
 * @param matcher Optional predicate (author, content) selecting which
 *   comments to sync; all recent comments are synced when omitted.
 * @returns the filtered comment list from the feed.
 *   NOTE(review): the local `ret` array of comment ids is built but never
 *   returned or read — confirm whether `return ret` was intended.
 */
export async function syncCommentToLemmy(
instance: string,
redditUrl: string,
postId: number,
auth: string,
matcher?: (author: string, content: string) => boolean,
) {
// Java-style 32-bit string hash, used to detect edited comment content.
function hashCode(str) {
let hash = 0;
for (let i = 0, len = str.length; i < len; i++) {
let chr = str.charCodeAt(i);
hash = (hash << 5) - hash + chr;
hash |= 0; // Convert to 32bit integer
}
return hash;
}
const { LemmyHttp } = await import("npm:lemmy-js-client@0.18.1");
let client = new LemmyHttp(`https://${instance}`, {
fetchFunction: fetch,
});
// (client as any)["#fetchFunction"] = fetch;
// Only comments from the last 24 hours, optionally narrowed by `matcher`.
let comments = (await fetchRss(redditUrl)).filter((i) =>
new Date(i.isoDate) >= new Date(Date.now() - 1000 * 60 * 60 * 24)
)
.filter((i) => matcher ? matcher(i.author, i.contentSnippet) : true);
const ret = [];
for (const comment of comments) {
// The Reddit comment id is the second-to-last path segment of its URL.
const linkParts = comment.link.split("/");
const commentId = linkParts[linkParts.length - 2];
console.log("Found comment with id " + commentId);
const contentHash = hashCode(comment.contentSnippet);
// Double newlines keep paragraphs intact in Lemmy's markdown rendering.
const syncedContent =
`New comment from ${comment.author} [on Reddit](${comment.link}):\n\n ` +
comment.contentSnippet.replace(/\n/g, "\n\n") + `\n\n(This gets synced)`;
const existingStatus =
spacexLemmyDb.commentMap[commentId];
// New comment, let's create one
if (!existingStatus) {
// Step 1: write temp to DB so we don't try to recreate forever
spacexLemmyDb.commentMap[commentId] = {
lemmyId: undefined,
};
// NOTE(review): this inner `ret` (the API response) shadows the outer
// `ret` accumulator declared above the loop.
const ret = await client.createComment({
post_id: postId,
content: syncedContent,
auth,
});
// Step 2: Create comment on Lemmy
// Step 3: Update Database
spacexLemmyDb.commentMap[commentId] = {
lemmyId: ret.comment_view.comment.id,
contentHash,
};
}
else {
// A placeholder without a lemmyId means a previous run crashed between
// steps 1 and 3; skip rather than risk a duplicate comment.
if (!existingStatus.lemmyId) {
console.log(
`Skipping comment ${commentId} because the previous run failed`,
);
continue;
}
// Content changed since the last sync: push an edit to Lemmy.
if (existingStatus.contentHash !== contentHash) {
await client.editComment({
comment_id: existingStatus.lemmyId,
content: syncedContent,
auth,
});
existingStatus.contentHash = contentHash;
}
}
// TODO: Push to lemmy...
ret.push(commentId);
}
// Persist the updated comment map for the next run.
await set("spacexLemmyDb", spacexLemmyDb);
return comments;
}
1
2
3
4
5
6
7
8
9
10
import { checkReddit } from "https://esm.town/v/bnorick/checkReddit";
// Demo invocation of checkReddit: watch r/buildapcsales under the given
// username with a mix of filter forms (plain strings and a pattern object —
// presumably regex patterns; see checkReddit for exact semantics).
export let exampleCheckReddit = checkReddit({
  check: {
    username: "bnorick",
    filters: {
      buildapcsales: [
        "case",
        "20[89]0",
        { pattern: "ssd", mode: "i" },
      ],
    },
  },
});
1
2
3
4
5
6
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
// Recent r/aww (cute animals) posts as a JSON listing.
const AWW_LISTING_URL = "https://www.reddit.com/r/aww/.json";
export let subredditExample = fetchJSON(AWW_LISTING_URL);
1
2
3
4
5
6
7
8
9
10
11
12
import { checkReddit } from "https://esm.town/v/bnorick/checkReddit";
// Toggle the given subreddits in checkReddit's state and return one
// human-readable status message per subreddit.
export let toggleSubreddits = async (subreddits) => {
  const result = await checkReddit({ toggle: subreddits });
  return Object.keys(result).map((subreddit) =>
    `subreddit ${subreddit} is now ${result[subreddit] ? "dis" : "en"}abled`
  );
};
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
import { redditNotifierState } from "https://esm.town/v/bnorick/redditNotifierState";
/**
 * Scan each configured subreddit for posts newer than the stored `last`
 * marker, collect posts whose title or selftext matches one of the
 * subreddit's regex patterns, and email a digest of the matches.
 *
 * NOTE(review): this mutates the imported redditNotifierState in place but
 * never calls a persistence API (unlike sibling vals that call `set`) —
 * confirm the updated `last` markers actually survive between runs.
 */
export async function redditAlertBackup({ lastRunAt }) {
  let subreddit_matches = {};
  for (const subreddit in redditNotifierState) {
    console.log(`processing subreddit ${subreddit}`);
    let state = redditNotifierState[subreddit];
    let last = state.last;
    let matches = [];
    // Compile each pattern once per subreddit instead of once per post per
    // pattern. (String.prototype.match with a /g regex ignores and resets
    // lastIndex, so reusing the compiled objects is safe.)
    const compiled = state.patterns.map(
      (pattern_str) => [pattern_str, RegExp(pattern_str, "gi")],
    );
    let data = await fetchJSON(
      `https://www.reddit.com/r/${subreddit}/new/.json?before=${last}`
    );
    let new_posts = 0;
    let count = data.data.dist;
    // Page through the listing (newest first) until an empty page comes back.
    while (count > 0) {
      new_posts += count;
      for (const post of data.data.children) {
        let post_data = post.data;
        for (const [pattern_str, pattern] of compiled) {
          if (
            post_data.title.match(pattern) ||
            post_data.selftext.match(pattern)
          ) {
            matches.push({
              url: post_data.url,
              title: post_data.title,
              pattern: pattern_str,
            });
            console.log(`found match ${post_data.url}`);
            break; // one match per post is enough
          }
        }
      }
      // First child is the newest post; its fullname becomes the next
      // `before` anchor and, ultimately, the stored checkpoint.
      last = data.data.children[0].data.name;
      data = await fetchJSON(
        `https://www.reddit.com/r/${subreddit}/new/.json?before=${last}`
      );
      count = data.data.dist;
    }
    console.log(`processed ${new_posts} posts`);
    redditNotifierState[subreddit].last = last;
    if (matches.length) {
      subreddit_matches[subreddit] = matches;
    }
  }
  // Assemble the email body: one section per subreddit that had matches.
  let text = [];
  let total_matches = 0;
  let first = true;
  for (let subreddit in subreddit_matches) {
    if (!first) {
      text.push("\n\n");
    }
    text.push(
      `${subreddit}\nfound ${subreddit_matches[subreddit].length} matches\n`
    );
    for (let match of subreddit_matches[subreddit]) {
      text.push(`${match.title}\n${match.url}\n`);
    }
    total_matches += subreddit_matches[subreddit].length;
    first = false;
  }
  console.log(text.join("\n"));
  // Only send an email when there is something to report.
  if (text.length)
    console.email(
      text.join("\n"),
      `Reddit Notifier (${total_matches} matches)`
    );
}
1
2
3
4
5
6
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
// JSON listing of the newest posts on r/aww (cute animals).
export let subredditExample = fetchJSON("https://www.reddit.com/r/aww/.json");
1
2
3
4
5
6
7
8
9
10
11
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
// Scrape r/blueskyinvites for Bluesky invite codes and return them deduped.
export async function getInvitesFromReddit() {
  const codeRegex = /bsky-social-[a-zA-Z0-9]+/g;
  const listing = await fetchJSON(
    "https://www.reddit.com/r/blueskyinvites/.json"
  );
  // Search the whole serialized listing rather than individual fields, then
  // dedupe with a Set (the Set constructor tolerates a null match result).
  const found = JSON.stringify(listing).match(codeRegex);
  return [...new Set(found)];
}
1
2
3
4
import { redditNewExample } from "https://esm.town/v/stevekrouse/redditNewExample";
// The Reddit "fullname" of the sixth post in the example listing — usable as
// a `before` cursor in subsequent /new listing requests.
const sixthPost = redditNewExample.data.children[5];
export const redditNewExampleBefore = sixthPost.data.name;
1
2
3
4
5
6
import { fetchJSON } from "https://esm.town/v/stevekrouse/fetchJSON?v=41";
// Fetch the current r/aww (cute animals) front-page listing as JSON.
export let subredditExample = fetchJSON(
  "https://www.reddit.com/r/aww/.json",
);
1
2
3
4
5
6
import { getInvitesFromReddit } from "https://esm.town/v/francoischalifour/getInvitesFromReddit";
// Gather the current Bluesky invite codes from Reddit and email them,
// including a count, under the subject "Invites from Reddit".
export async function emailInvitesFromReddit() {
  const codes = await getInvitesFromReddit();
  const count = codes.length;
  console.email({ codes, count }, "Invites from Reddit");
}

Polls the Vulkan specification repository for specification updates. If found, sends an email and makes a post on /r/vulkan.

Runs every 15 min
Fork
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
import { fetch } from "https://esm.town/v/std/fetch";
import { set } from "https://esm.town/v/std/set?v=11";
import { getRedditAccessToken } from "https://esm.town/v/tambre/getRedditAccessToken";
import { email } from "https://esm.town/v/std/email?v=9";
let { lastVulkanRSSFeedTime } = await import("https://esm.town/v/tambre/lastVulkanRSSFeedTime");
import { parseXML } from "https://esm.town/v/stevekrouse/parseXML?v=1";
// Poll the Vulkan-Docs GitHub commit Atom feed for new "spec update" commits.
// For each new one: email a notification, submit a link post to r/vulkan, and
// advance the persisted timestamp checkpoint so later runs only see newer
// commits. Returns the entries newer than the previous checkpoint.
// NOTE(review): assumes feed.entry is always an array — some XML parsers
// return a bare object when the feed has exactly one entry; confirm parseXML.
export async function pollVulkanRSSFeed({ lastRunAt }: Interval) {
const githubProject = "https://github.com/KhronosGroup/Vulkan-Docs";
const response = await fetch(`${githubProject}/commits.atom`);
const feed =
(await parseXML(await response.text())).feed;
// Only entries newer than the checkpoint saved by the previous run.
const entries = feed.entry.filter((entry) =>
new Date(lastVulkanRSSFeedTime) < new Date(entry.updated)
);
for (const entry of entries) {
// Only commits titled like "Vulkan 1.3.261 spec update" are announced.
const titles = entry.title.match(/Vulkan \d+\.\d+\.\d+ spec update/);
if (!titles || !titles.length) {
continue;
}
const title = titles[0];
// Atom entry ids embed the commit SHA after "Commit/".
// NOTE(review): .match(...)[1] throws if that id format ever changes.
const url = `${githubProject}/commit/${entry.id.match(/Commit\/(.+)/)[1]}`;
await email({
html: `<a href="${url}">Vulkan-Docs commit</a>\n\n${entry.content}`,
subject: title,
});
// Cross-post the commit to r/vulkan as an OAuth link submission.
const formData = new URLSearchParams();
formData.append("kind", "link");
formData.append("sr", "vulkan");
formData.append("title", title);
formData.append("url", url);
formData.append("send_replies", "true");
const result = await fetch("https://oauth.reddit.com/api/submit", {
method: "POST",
headers: {
"Authorization": `Bearer ${await getRedditAccessToken()}`,
"Content-Type": "application/x-www-form-urlencoded",
},
body: formData.toString(),
});
console.info(`${result.status}: ${await result.text()}`);
// Advance the checkpoint only after this entry was fully processed, so a
// crash mid-loop re-processes the failed entry next run.
lastVulkanRSSFeedTime = new Date(entry.updated).toISOString();
}
// Persist the checkpoint for the next scheduled run.
await set(
"lastVulkanRSSFeedTime",
lastVulkanRSSFeedTime,
);
return entries;
}