forked from notBrad/bubo-rss
src/@types/bubo.d.ts (new file, vendored, 15 lines)
@@ -0,0 +1,15 @@
export interface Feeds {
  [key: string]: object[]
}
export interface FeedItem {
  [key: string]: string | number | Date | FeedItem[];
  items: FeedItem[]
}

//NEW WAY
export type JSONValue =
  | string
  | number
  | boolean
  | { [x: string]: JSONValue }
  | Array<JSONValue>;
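
A minimal sketch of how these types are meant to fit together; the values below are placeholders, not from the repository:

import { Feeds, FeedItem, JSONValue } from "./@types/bubo";

// FeedItem is an open record (string/number/Date/FeedItem[] values) plus a required items array
const item: FeedItem = {
  title: "Example post",
  link: "https://example.com/post",
  items: []
};

// Feeds maps a group name to the parsed feed objects collected for that group
const grouped: Feeds = { "Blogs": [item] };

// JSONValue covers anything parsed out of a JSON feed or the feeds.json config
const feedConfig: JSONValue = { "Blogs": ["https://example.com/feed.xml"] };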

src/feeds.json (deleted, 25 lines)
@@ -1,25 +0,0 @@
{
  "Web Development": [
    "https://hacks.mozilla.org/feed/",
    "https://blog.mozilla.org/feed/",
    "https://web.dev/feed.xml",
    "https://v8.dev/blog.atom",
    "https://alistapart.com/main/feed/",
    "https://css-tricks.com/feed/",
    "https://dev.to/feed"
  ],
  "Blogs": [
    "https://george.mand.is/feed.xml",
    "https://joy.recurse.com/feed.atom"
  ],
  "My GitHub Projects": [
    "https://github.com/georgemandis.atom",
    "https://github.com/georgemandis/bubo-rss/releases.atom",
    "https://github.com/georgemandis/konami-js/releases.atom",
    "https://github.com/georgemandis/konami-js/commits/main.atom",
    "https://github.com/javascriptforartists/cheer-me-up-and-sing-me-a-song/commits/master.atom",
    "https://github.com/georgemandis/circuit-playground-midi-multi-tool/commits/master.atom",
    "https://github.com/georgemandis/remote-working-list/commits/master.atom",
    "https://github.com/georgemandis/tweeter-totter/commits/master.atom"
  ]
}

src/index.js (deleted, 129 lines)
@@ -1,129 +0,0 @@
/*
 * 🦉 Bubo RSS Reader
 * ====
 * Dead simple feed reader that renders an HTML
 * page with links to content from feeds organized by site
 *
 * Code: https://github.com/georgemandis/bubo-rss
 * Copyright (c) 2019 George Mandis (https://george.mand.is)
 * Version: 1.0.1 (11/14/2021)
 * Licensed under the MIT License (http://opensource.org/licenses/MIT)
 */

const fetch = require("node-fetch");
const Parser = require("rss-parser");
const parser = new Parser();

const nunjucks = require("nunjucks");
const env = nunjucks.configure({ autoescape: true });

const feeds = require("./feeds.json");

/**
 * Global filters for my Nunjucks templates
 */
env.addFilter("formatDate", function (dateString) {
  const formattedDate = new Date(dateString).toLocaleDateString();
  return formattedDate !== 'Invalid Date' ? formattedDate : dateString;
});

env.addGlobal('now', (new Date()).toUTCString());

// parse RSS/XML or JSON feeds
function parseFeed(response) {
  const contentType = response.headers.get("content-type")
    ? response.headers.get("content-type").split(";")[0]
    : false;

  const rssFeed = [contentType]
    .map(item =>
      [
        "application/atom+xml",
        "application/rss+xml",
        "application/xml",
        "text/xml",
        "text/html" // this is kind of a gamble
      ].includes(item)
        ? response.text()
        : false
    )
    .filter(_ => _)[0];

  const jsonFeed = [contentType]
    .map(item =>
      ["application/json", "application/feed+json"].includes(item) ? response.json() : false
    )
    .filter(_ => _)[0];

  return rssFeed || jsonFeed || false;
}

/*
  There's a little inconsistency with how feeds report certain things like
  title, links and timestamps. These helpers try to normalize that bit and
  provide an order-of-operations list of properties to look for.

  Note: these are tightly-coupled to the template and a personal preference.
*/

const getLink = (obj) => {
  const link_values = ["link", "url", "guid", "home_page_url"];
  const keys = Object.keys(obj);
  const link_property = link_values.find(link_value => keys.includes(link_value));
  return obj[link_property];
}


// fallback to URL for the title if not present (coupled to my template)
const getTitle = (obj) => {
  const title_values = ["title", "url", "link"]; // fallback to url/link as title if omitted
  const keys = Object.keys(obj);
  const title_property = title_values.find(title_value => keys.includes(title_value));
  return obj[title_property];
}

// More dependable way to get timestamps
const getTimestamp = (obj) => {
  const timestamp = new Date(obj.pubDate || obj.isoDate || obj.date || obj.date_published).getTime();
  return isNaN(timestamp) ? (obj.pubDate || obj.isoDate || obj.date || obj.date_published) : timestamp;
}

// fetch the feeds and build the object for our template
(async () => {
  const contentFromAllFeeds = {};
  const errors = [];

  for (const group in feeds) {
    contentFromAllFeeds[group] = [];

    for (let index = 0; index < feeds[group].length; index++) {
      try {
        const response = await fetch(feeds[group][index]);
        const body = await parseFeed(response);
        const contents =
          typeof body === "string" ? await parser.parseString(body) : body;

        contents.feed = feeds[group][index];
        contents.title = getTitle(contents);
        contents.link = getLink(contents);
        contentFromAllFeeds[group].push(contents);

        // try to normalize date attribute naming
        contents?.items?.forEach(item => {
          item.timestamp = getTimestamp(item);
          item.title = getTitle(item);
          item.link = getLink(item);
        });

      } catch (error) {
        errors.push(feeds[group][index]);
      }
    }
  }

  const output = env.render("./src/template.html", {
    data: contentFromAllFeeds,
    errors: errors
  });
  console.log(output);
})();
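
The `[contentType].map(...).filter(_ => _)[0]` construction in parseFeed above is an unusual way to spell a conditional; a short sketch of the equivalent logic, using a made-up content type for illustration:

// Wrapping the single contentType value in an array lets map/filter act as a
// one-shot "is it in this list?" check. For a hypothetical XML-ish content type:
const contentType = "application/rss+xml"; // invented value for illustration
const isXmlLike = [
  "application/atom+xml",
  "application/rss+xml",
  "application/xml",
  "text/xml",
  "text/html" // the same gamble as above
].includes(contentType);
// rssFeed ends up as response.text() (a pending Promise) when isXmlLike is true
// and undefined otherwise, so `rssFeed || jsonFeed || false` returns whichever body matched.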

src/index.ts (new file, 143 lines)
@@ -0,0 +1,143 @@
/*
 * 🦉 Bubo Reader
 * ====
 * Dead simple feed reader (RSS + JSON) that renders an HTML
 * page with links to content from feeds organized by site
 *
 * Code: https://github.com/georgemandis/bubo-rss
 * Copyright (c) 2019 George Mandis (https://george.mand.is)
 * Version: 1.0.1 (11/14/2021)
 * Licensed under the MIT License (http://opensource.org/licenses/MIT)
 */

import fetch from "node-fetch";
import Parser from "rss-parser";
import { Feeds, FeedItem } from "./@types/bubo";
import { Response } from "node-fetch";
import { render } from "./renderer.js";
import { getLink, getTitle, getTimestamp, parseFeed, getFeedList } from "./utilities.js";
import { writeFile } from "fs/promises";
import chalk from "chalk";

const parser = new Parser();
const feedList = await getFeedList();
const feedListLength = Object.entries(feedList).flat(2).length - Object.keys(feedList).length;

/**
 * contentFromAllFeeds = Contains normalized, aggregated feed data and is passed to the template renderer at the end.
 * errors = Contains errors from parsing feeds and is also passed to the template.
 */
const contentFromAllFeeds: Feeds = {};
const errors: unknown[] = [];

// benchmarking data + utility
const initTime = Date.now();
const benchmark = (startTime: number) => chalk.cyanBright.bold(`(${(Date.now() - startTime) / 1000} seconds)`);

/**
 * These values are used to control throttling/batching the fetches:
 *  - MAX_CONNECTIONS = max number of fetches to contain in a batch
 *  - DELAY_MS = the delay in milliseconds between batches
 */
const MAX_CONNECTIONS = Infinity;
const DELAY_MS = 850;

const error = chalk.bold.red;
const success = chalk.bold.green;

// keeping tally of total feeds fetched and parsed so we can compare
// to feedListLength and know when we're finished.
let completed = 0;


/**
 * finishBuild
 * --
 * Called when all the feeds are finished fetching and
 * we want to build the static output.
 */
const finishBuild: () => void = async () => {
  console.log("\nDone fetching everything!");

  // generate the static HTML output from our template renderer
  const output = render({
    data: contentFromAllFeeds,
    errors: errors
  });

  // write the output to public/index.html
  await writeFile("./public/index.html", output);
  console.log(`Finished writing to output. ${benchmark(initTime)}`);
};

/**
 * processFeed
 * --
 * Process an individual feed and normalize its items
 * @param { group, feed, startTime }
 * @returns Promise<void>
 */
const processFeed = (
  {
    group, feed, startTime
  }: { group: string; feed: string; startTime: number }
) => async (response: Response): Promise<void> => {
  const body = await parseFeed(response);
  completed++;
  // skip to the next one if this didn't work out
  if (!body) return;

  try {
    const contents: FeedItem =
      (typeof body === "string" ? (await parser.parseString(body)) : body) as FeedItem;

    contents.feed = feed;
    contents.title = getTitle(contents);
    contents.link = getLink(contents);

    // try to normalize date attribute naming
    contents?.items?.forEach((item) => {
      item.timestamp = getTimestamp(item);
      item.title = getTitle(item);
      item.link = getLink(item);
    });

    contentFromAllFeeds[group].push(contents as object);
    console.log(`${success("Successfully fetched:")} ${feed} ${benchmark(startTime)}`);

  } catch (err) {
    console.log(`${error("Error processing:")} ${feed} ${benchmark(startTime)}`);
    errors.push(err);
  }

  // if this is the last feed, go ahead and build the output
  (completed === feedListLength) && finishBuild();
};


// go through each group of feeds and process
const processFeeds = () => {
  let idx = 0;

  for (const [group, feeds] of Object.entries(feedList)) {
    contentFromAllFeeds[group] = [];

    for (const feed of feeds) {
      const startTime = Date.now();
      setTimeout(() => {
        console.log(`Fetching: ${feed}...`);

        fetch(feed).then(processFeed({ group, feed, startTime })).catch(err => {
          console.log(error(`Error fetching ${feed} ${benchmark(startTime)}`));
          errors.push(`Error fetching ${feed} ${err.toString()}`);
        });

      }, (idx % (feedListLength / MAX_CONNECTIONS)) * DELAY_MS);
      idx++;
    }

  }
};


processFeeds();
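
Two of the expressions above are worth unpacking; a small sketch with invented group names, URLs, and a finite MAX_CONNECTIONS (the file itself uses Infinity):

// feedListLength: Object.entries(feedList).flat(2) yields every group name followed
// by that group's URLs, so subtracting the group count leaves the number of feeds.
const feedList = {
  "Blogs": ["https://example.com/a.xml", "https://example.com/b.xml"],
  "News": ["https://example.com/c.xml", "https://example.com/d.xml"]
}; // hypothetical config
const feedListLength =
  Object.entries(feedList).flat(2).length - Object.keys(feedList).length; // 6 - 2 = 4

// Batching: with MAX_CONNECTIONS = 2 and DELAY_MS = 850 there are 4 / 2 = 2 slots,
// and (idx % 2) * 850 gives delays of 0, 850, 0, 850 ms — two fetches per slot.
// With MAX_CONNECTIONS = Infinity (the value in this file) the slot count is 0,
// the computed delay is NaN, and the throttling is effectively disabled.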

src/renderer.ts (new file, 37 lines)
@@ -0,0 +1,37 @@
/*
 * Return our renderer.
 * Using Nunjucks out of the box.
 * https://mozilla.github.io/nunjucks/
 */

import nunjucks from "nunjucks";
const env: nunjucks.Environment = nunjucks.configure({ autoescape: true });
import { readFile } from "fs/promises";
import { Feeds } from "./@types/bubo";

/**
 * Global filters for my Nunjucks templates
 */
env.addFilter("formatDate", function (dateString): string {
  const date: Date = new Date(parseInt(dateString));
  return !isNaN(date.getTime()) ? date.toLocaleDateString() : dateString;
});

env.addGlobal("now", (new Date()).toUTCString());

// load the template
const template: string =
  (await readFile(
    new URL("../config/template.html", import.meta.url)
  )).toString();

// generate the static HTML output from our template renderer
const render = ({ data, errors }: { data: Feeds; errors: unknown[] }) => {
  return env.renderString(template, {
    data,
    errors
  });
};


export { render };
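
A minimal sketch of calling the exported render function; the data below is a placeholder shaped like the Feeds type (in the real build, finishBuild in src/index.ts supplies it):

import { render } from "./renderer.js";

// one group with one parsed feed; autoescape is on, so titles are HTML-escaped
const html = render({
  data: {
    "Blogs": [{ title: "Example blog", feed: "https://example.com/feed.xml", items: [] }]
  },
  errors: []
});
// html is the rendered template string that index.ts writes to public/index.html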

src/template.html (deleted, 48 lines)
@@ -1,48 +0,0 @@
<!DOCTYPE html>
<html lang="en">
<head>
  <meta charset="UTF-8">
  <meta name="viewport" content="width=device-width, initial-scale=1.0">
  <meta http-equiv="X-UA-Compatible" content="ie=edge">
  <title>🦉 Bubo Reader</title>
  <link rel="stylesheet" href="/style.css">
</head>
<body>

  <h1>🦉 Bubo Reader</h1>

  {% for group, feeds in data %}
  <h2>{{ group }}</h2>
  {% for feed in feeds %}
  <details>
    <summary>
      <span class="feed-title">{{ feed.title }}</span>
      <span class="feed-url">({{ feed.feed }})</span>
    </summary>
    <ul>
      {% for item in feed.items %}
      <li>
        {{ item.timestamp | formatDate }} - <a href="{{ item.link }}" target="_blank" rel="noopener noreferrer nofollow">{{ item.title }}</a>
      </li>
      {% endfor %}
    </ul>
  </details>
  {% endfor %}
  {% endfor %}

  {% if errors | length > 0 %}
  <h2>Errors</h2>
  <p>There were errors trying to parse these feeds:</p>
  <ul>
    {% for error in errors %}
    <li>{{ error }}</li>
    {% endfor %}
  </ul>
  {% endif %}

  <hr>
  <p>
    Last updated {{ now }}. Powered by <a href="https://github.com/georgemandis/bubo-rss">Bubo Reader</a>, a project by <a href="https://george.mand.is">George Mandis</a>
  </p>
</body>
</html>

src/utilities.ts (new file, 74 lines)
@@ -0,0 +1,74 @@
/*
  There's a little inconsistency with how feeds report certain things like
  title, links and timestamps. These helpers try to normalize that bit and
  provide an order-of-operations list of properties to look for.

  Note: these are tightly-coupled to the template and a personal preference.
*/

import { Response } from "node-fetch";
import { readFile } from "fs/promises";
import { FeedItem, JSONValue } from "./@types/bubo";

export const getLink = (obj: FeedItem): string => {
  const link_values: string[] = ["link", "url", "guid", "home_page_url"];
  const keys: string[] = Object.keys(obj);
  const link_property: string | undefined = link_values.find(link_value => keys.includes(link_value));
  return link_property ? obj[link_property] as string : "";
};


// fallback to URL for the title if not present (coupled to my template)
export const getTitle = (obj: FeedItem): string => {
  const title_values: string[] = ["title", "url", "link"]; // fallback to url/link as title if omitted
  const keys: string[] = Object.keys(obj);
  const title_property: string | undefined = title_values.find(title_value => keys.includes(title_value));
  return title_property ? obj[title_property] as string : "";
};

// More dependable way to get timestamps
export const getTimestamp = (obj: FeedItem): string => {
  const dateString: string = (obj.pubDate || obj.isoDate || obj.date || obj.date_published).toString();
  const timestamp: number = new Date(dateString).getTime();
  return isNaN(timestamp) ? dateString : timestamp.toString();
};


// parse RSS/XML or JSON feeds
export async function parseFeed(response: Response): Promise<JSONValue> {
  const contentType = response.headers.get("content-type")?.split(";")[0];

  if (!contentType) return {};

  const rssFeed = [contentType]
    .map(item =>
      [
        "application/atom+xml",
        "application/rss+xml",
        "application/xml",
        "text/xml",
        "text/html" // this is kind of a gamble
      ].includes(item)
        ? response.text()
        : false
    )
    .filter(_ => _)[0];

  const jsonFeed = [contentType]
    .map(item =>
      ["application/json", "application/feed+json"].includes(item) ? response.json() as Promise<JSONValue> : false
    )
    .filter(_ => _)[0];

  return rssFeed || jsonFeed || {};
}


export const getFeedList = async (): Promise<JSONValue> => {
  return JSON.parse(
    (await readFile(
      new URL("../config/feeds.json", import.meta.url)
    )).toString()
  );
};
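
A short sketch of how the normalization helpers above behave on two hypothetical items (field values invented for illustration): an RSS-style item as rss-parser produces, and a JSON Feed-style item:

import { getLink, getTitle, getTimestamp } from "./utilities.js";
import { FeedItem } from "./@types/bubo";

// RSS-style item: rss-parser exposes title, link and pubDate/isoDate
const rssItem: FeedItem = {
  title: "A post",
  link: "https://example.com/a",
  pubDate: "Mon, 01 Nov 2021 00:00:00 GMT",
  items: []
};
getTitle(rssItem);     // "A post"
getLink(rssItem);      // "https://example.com/a"
getTimestamp(rssItem); // epoch milliseconds as a string: "1635724800000"

// JSON Feed-style item: url and date_published instead of link and pubDate
const jsonItem: FeedItem = {
  url: "https://example.com/b",
  date_published: "2021-11-01T00:00:00Z",
  items: []
};
getTitle(jsonItem);     // no title, so falls back to "https://example.com/b"
getLink(jsonItem);      // "https://example.com/b"
getTimestamp(jsonItem); // "1635724800000"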