Create crawl.js
crawl.js
ADDED
@@ -0,0 +1,44 @@
import { exists } from "https://deno.land/std/fs/mod.ts";

// Load the list of subreddit names to crawl.
let subreddits = JSON.parse(await Deno.readTextFile(`./top-5k-subreddits.json`));

while(1) {

  await new Promise(r => setTimeout(r, 1000));

  try {

    let i = -1;
    for(let s of subreddits) {
      i++;
      // Skip subreddits that have already been downloaded.
      if(await exists(`./post-data-by-subreddit/${s}.json`)) continue;
      let after = "";
      let posts = [];
      // Page through the subreddit's all-time top posts, 100 per request.
      while(true) {
        let url = `https://www.reddit.com/r/${s}/top.json?t=all&limit=100&after=${after}`;
        console.log(i, url);
        let result = await fetch(url).then(r => r.json()).catch(e => (console.error(e), false));
        let data = result.data;

        // A non-listing response (e.g. a rate-limit error) gets logged when
        // possible, then throws below, landing in the outer catch's backoff.
        if(!data.children) console.log(JSON.stringify(data));

        for(let post of data.children) {
          delete post.data.all_awardings; // awards take up a lot of space (e.g. 20MB with vs 4MB without for r/funny)
        }
        posts.push(...data.children);
        after = data.after;
        if(!after) break; // no more pages
        if(!data.children.find(c => c.data.score > 5)) break; // stop once a whole page is low-score
      }
      await Deno.writeTextFile(`./post-data-by-subreddit/${s}.json`, JSON.stringify(posts));
    }

    console.log("FINISHED.");
    break;

  } catch(e) {
    // On any error, wait 15 minutes before retrying from the start.
    console.error(e);
    await new Promise(r => setTimeout(r, 1000*60*15));
  }

}
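For reference (an inference from the code, not part of the commit): the script expects `./top-5k-subreddits.json` to be a flat JSON array of subreddit names, something like `["funny", "AskReddit", "pics"]`, and the `./post-data-by-subreddit/` directory must already exist, since Deno.writeTextFile does not create parent directories. Under Deno it would be run with network and file-system permissions, along the lines of:

deno run --allow-net --allow-read --allow-write crawl.js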