1
0
forked from notBrad/bubo-rss

Compare commits

...

5 Commits

Author SHA1 Message Date
George Mandis
78629d2757 Updated packages 2023-06-03 15:32:13 -04:00
George Mandis
b450b6ed54 Removed old social media feed 2023-06-03 15:31:34 -04:00
George Mandis
ffdaf0b72b 2.0.2 2023-06-03 15:28:41 -04:00
George Mandis
5ebc0d4952 Fixing a bug where failing to fetch a feed could make Bubo hang 2023-06-03 15:28:29 -04:00
George Mandis
04696e9df6 Updating packages 2023-06-03 15:28:10 -04:00
4 changed files with 383 additions and 387 deletions

View File

@@ -12,9 +12,6 @@
"https://george.mand.is/feed.xml", "https://george.mand.is/feed.xml",
"https://joy.recurse.com/feed.atom" "https://joy.recurse.com/feed.atom"
], ],
"Social": [
"https://social.mandis.dev/@georgemandis.rss"
],
"My GitHub Projects": [ "My GitHub Projects": [
"https://github.com/georgemandis.atom", "https://github.com/georgemandis.atom",
"https://github.com/georgemandis/bubo-rss/releases.atom", "https://github.com/georgemandis/bubo-rss/releases.atom",

708
package-lock.json generated

File diff suppressed because it is too large Load Diff

View File

@@ -1,6 +1,6 @@
{ {
"name": "bubo-reader", "name": "bubo-reader",
"version": "2.0.1", "version": "2.0.2",
"description": "A simple but effective feed reader (RSS, JSON)", "description": "A simple but effective feed reader (RSS, JSON)",
"homepage": "https://github.com/georgemandis/bubo-rss", "homepage": "https://github.com/georgemandis/bubo-rss",
"main": "src/index.js", "main": "src/index.js",
@@ -27,19 +27,19 @@
}, },
"license": "MIT", "license": "MIT",
"dependencies": { "dependencies": {
"chalk": "^5.1.2", "chalk": "^5.2.0",
"node-fetch": "^3.3.0", "node-fetch": "^3.3.1",
"nunjucks": "^3.2.3", "nunjucks": "^3.2.4",
"rss-parser": "^3.12.0" "rss-parser": "^3.13.0"
}, },
"devDependencies": { "devDependencies": {
"@types/node": "^16.18.4", "@types/node": "^20.2.5",
"@types/nunjucks": "^3.2.1", "@types/nunjucks": "^3.2.2",
"@types/xml2js": "^0.4.11", "@types/xml2js": "^0.4.11",
"@typescript-eslint/eslint-plugin": "^5.45.0", "@typescript-eslint/eslint-plugin": "^5.59.8",
"@typescript-eslint/parser": "^5.45.0", "@typescript-eslint/parser": "^5.59.8",
"eslint": "^8.29.0", "eslint": "^8.42.0",
"tslib": "^2.4.1", "tslib": "^2.5.3",
"typescript": "^4.9.3" "typescript": "^5.1.3"
} }
} }

View File

@@ -66,7 +66,11 @@ let completed = 0;
* and we want to build the static output. * and we want to build the static output.
*/ */
const finishBuild: () => void = async () => { const finishBuild: () => void = async () => {
console.log("\nDone fetching everything!"); completed++;
// if this isn't the last feed, just return early
if (completed !== feedListLength) return;
process.stdout.write("\nDone fetching everything!\n");
// generate the static HTML output from our template renderer // generate the static HTML output from our template renderer
const output = render({ const output = render({
@@ -77,10 +81,10 @@ const finishBuild: () => void = async () => { const finishBuild: () => void = async () => {
// write the output to public/index.html // write the output to public/index.html
await writeFile("./public/index.html", output); await writeFile("./public/index.html", output);
console.log( process.stdout.write(
`\nFinished writing to output:\n- ${feedListLength} feeds in ${benchmark( `\nFinished writing to output:\n- ${feedListLength} feeds in ${benchmark(
initTime initTime
)}\n- ${errors.length} errors` )}\n- ${errors.length} errors\n`
); );
}; };
@@ -103,8 +107,7 @@ const processFeed = const processFeed =
}) => }) =>
async (response: Response): Promise<void> => { async (response: Response): Promise<void> => {
const body = await parseFeed(response); const body = await parseFeed(response);
completed++; //skip to the next one if this didn't work out
// skip to the next one if this didn't work out
if (!body) return; if (!body) return;
try { try {
@@ -124,20 +127,19 @@
}); });
contentFromAllFeeds[group].push(contents as object); contentFromAllFeeds[group].push(contents as object);
console.log( process.stdout.write(
`${success("Successfully fetched:")} ${feed} - ${benchmark(startTime)}` `${success("Successfully fetched:")} ${feed} - ${benchmark(startTime)}\n`
); );
} catch (err) { } catch (err) {
console.log( process.stdout.write(
`${error("Error processing:")} ${feed} - ${benchmark( `${error("Error processing:")} ${feed} - ${benchmark(
startTime startTime
)}\n${err}` )}\n${err}\n`
); );
errors.push(`Error processing: ${feed}\n\t${err}`); errors.push(`Error processing: ${feed}\n\t${err}`);
} }
// if this is the last feed, go ahead and build the output finishBuild();
completed === feedListLength && finishBuild();
}; };
// go through each group of feeds and process // go through each group of feeds and process
@@ -150,15 +152,16 @@ const processFeeds = () => { const processFeeds = () => {
for (const feed of feeds) { for (const feed of feeds) {
const startTime = Date.now(); const startTime = Date.now();
setTimeout(() => { setTimeout(() => {
console.log(`Fetching: ${feed}...`); process.stdout.write(`Fetching: ${feed}...\n`);
fetch(feed) fetch(feed)
.then(processFeed({ group, feed, startTime })) .then(processFeed({ group, feed, startTime }))
.catch(err => { .catch(err => {
console.log( process.stdout.write(
error(`Error fetching ${feed} ${benchmark(startTime)}`) error(`Error fetching ${feed} ${benchmark(startTime)}\n`)
); );
errors.push(`Error fetching ${feed} ${err.toString()}`); errors.push(`Error fetching ${feed} ${err.toString()}\n`);
finishBuild();
}); });
}, (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS); }, (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS);
idx++; idx++;