chore: replace flake8 and black with ruff

Author: aserper
Date: 2025-12-13 23:48:58 -05:00
parent 6c4ef36aa8
commit 561377d517
9 changed files with 79 additions and 52 deletions


@@ -94,14 +94,13 @@ jobs:
python -m pip install --upgrade pip
pip install -r requirements-test.txt
- - name: Run flake8
+ - name: Run ruff check
run: |
- flake8 bot.py main.py --count --select=E9,F63,F7,F82 --show-source --statistics
- flake8 bot.py main.py --count --max-complexity=10 --max-line-length=127 --statistics
+ ruff check .
- - name: Run black check
+ - name: Run ruff format
run: |
- black --check bot.py main.py test_bot.py test_integration.py
+ ruff format --check .
- name: Run mypy
run: |


@@ -96,8 +96,8 @@ Tests run automatically on every push to `main` and on all pull requests via [.g
- Uploads coverage to Codecov
3. **Code Quality**
- - Runs flake8 for linting
- - Runs black for code formatting checks
+ - Runs ruff check for linting
+ - Runs ruff format for code formatting checks
- Runs mypy for type checking
4. **Docker Build Test**


@@ -93,8 +93,7 @@ Comprehensive GitHub Actions workflow with **5 jobs**:
- Uploads coverage to Codecov
3. **Code Quality**
- - Flake8 linting (error detection + complexity)
- - Black formatting verification
+ - Ruff linting and formatting verification
- Mypy type checking
4. **Docker Build Test**

bot.py

@@ -95,9 +95,7 @@ class MastodonRSSBot:
try:
feed = feedparser.parse(feed_url)
if hasattr(feed, "bozo_exception"):
- logger.warning(
-     f"Feed parsing issue for {feed_url}: {feed.bozo_exception}"
- )
+ logger.warning(f"Feed parsing issue for {feed_url}: {feed.bozo_exception}")
return feed
except Exception as e:
logger.error(f"Error parsing feed {feed_url}: {e}")
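For context on the `bozo_exception` check being reformatted here: feedparser does not raise on malformed feeds; it sets a `bozo` flag and attaches the underlying error to the result object. A minimal sketch (not code from this repository):

```python
# Minimal sketch: feedparser flags malformed input via the "bozo" bit instead
# of raising, and stores the error in bozo_exception -- the attribute the
# hasattr() check above looks for.
import feedparser

result = feedparser.parse("<rss><channel><title>broken")  # deliberately malformed
if getattr(result, "bozo", False):
    print(f"Feed parsing issue: {result.bozo_exception}")
else:
    print(f"Parsed {len(result.entries)} entries")
```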

main.py

@@ -27,9 +27,7 @@ class Config:
feed_urls: List[str] = field(default_factory=list)
toot_visibility: str = "public"
check_interval: int = 300
- state_file: Path = field(
-     default_factory=lambda: Path("/state/processed_entries.txt")
- )
+ state_file: Path = field(default_factory=lambda: Path("/state/processed_entries.txt"))
@classmethod
def from_env(cls) -> "Config":
@@ -50,9 +48,7 @@ class Config:
}.items()
if not v
]
- raise ValueError(
-     f"Missing required environment variables: {', '.join(missing)}"
- )
+ raise ValueError(f"Missing required environment variables: {', '.join(missing)}")
# Parse feeds
feed_urls = []
@@ -63,9 +59,7 @@ class Config:
# 2. Comma-separated list of feeds
if os.environ.get("RSS_FEEDS"):
- feeds = [
-     url.strip() for url in os.environ["RSS_FEEDS"].split(",") if url.strip()
- ]
+ feeds = [url.strip() for url in os.environ["RSS_FEEDS"].split(",") if url.strip()]
feed_urls.extend(feeds)
# 3. File containing list of feeds
@@ -75,11 +69,7 @@ class Config:
if path.exists():
try:
content = path.read_text().splitlines()
- file_feeds = [
-     line.strip()
-     for line in content
-     if line.strip() and not line.startswith("#")
- ]
+ file_feeds = [line.strip() for line in content if line.strip() and not line.startswith("#")]
feed_urls.extend(file_feeds)
except Exception as e:
logger.error(f"Error reading feeds file {feeds_file}: {e}")
@@ -90,9 +80,7 @@ class Config:
unique_feed_urls = list(dict.fromkeys(feed_urls))
if not unique_feed_urls:
- raise ValueError(
-     "No RSS feeds configured. Please set RSS_FEED_URL, RSS_FEEDS, or FEEDS_FILE."
- )
+ raise ValueError("No RSS feeds configured. Please set RSS_FEED_URL, RSS_FEEDS, or FEEDS_FILE.")
return cls(
instance_url=instance_url, # type: ignore # checked above
@@ -102,9 +90,7 @@ class Config:
feed_urls=unique_feed_urls,
toot_visibility=os.environ.get("TOOT_VISIBILITY", "public"),
check_interval=int(os.environ.get("CHECK_INTERVAL", "300")),
- state_file=Path(
-     os.environ.get("PROCESSED_ENTRIES_FILE", "/state/processed_entries.txt")
- )
+ state_file=Path(os.environ.get("PROCESSED_ENTRIES_FILE", "/state/processed_entries.txt")),
)
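The hunks above only reflow the feed-collection logic in `Config.from_env` to the longer 127-character limit. For readers skimming the diff, here is a condensed sketch of that pattern (simplified names, not a drop-in copy of the real method): comma-splitting `RSS_FEEDS`, reading `FEEDS_FILE` while skipping `#` comments, then de-duplicating with `dict.fromkeys()`, which preserves first-seen order.

```python
# Condensed sketch of the feed-collection pattern shown in the hunks above
# (simplified; the real Config.from_env also accepts a single RSS_FEED_URL).
import os
from pathlib import Path


def collect_feed_urls() -> list[str]:
    urls: list[str] = []
    # Comma-separated list of feeds from the environment.
    if os.environ.get("RSS_FEEDS"):
        urls.extend(u.strip() for u in os.environ["RSS_FEEDS"].split(",") if u.strip())
    # Optional file with one feed per line; lines starting with '#' are comments.
    feeds_file = os.environ.get("FEEDS_FILE")
    if feeds_file and Path(feeds_file).exists():
        lines = Path(feeds_file).read_text().splitlines()
        urls.extend(line.strip() for line in lines if line.strip() and not line.startswith("#"))
    # dict.fromkeys() removes duplicates while preserving first-seen order.
    return list(dict.fromkeys(urls))
```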

pyproject.toml (new file)

@@ -0,0 +1,58 @@
[project]
name = "masto-rss"
version = "0.1.0"
description = "A Mastodon RSS bot"
dependencies = [
"feedparser>=6.0.10",
"Mastodon.py>=1.8.0",
"python-dateutil>=2.8.2",
"requests>=2.31.0",
]
requires-python = ">=3.10"
[tool.ruff]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
# McCabe complexity (`C901`) by default.
select = ["E", "F", "W", "C90"]
ignore = []
# Allow fix for all enabled rules (when `--fix` is provided).
fixable = ["ALL"]
unfixable = []
# Exclude a variety of commonly ignored directories.
exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".mypy_cache",
".nox",
".pants.d",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"venv",
]
line-length = 127
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
# Assume Python 3.10
target-version = "py310"
[tool.ruff.mccabe]
# Unlike Flake8, default to a complexity level of 10.
max-complexity = 10
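As a quick illustration of the `dummy-variable-rgx` comment above (a sketch, not code from this repository): with the configured pattern, Ruff's unused-variable rule F841 skips underscore-prefixed locals but still reports plain ones.

```python
# Sketch only: effect of dummy-variable-rgx on rule F841.
def summarize(entry: dict) -> str:
    _debug_payload = dict(entry)   # underscore-prefixed: ignored by F841
    leftover = entry.get("raw")    # would be reported: assigned but never used
    return entry.get("title", "")
```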


@@ -11,8 +11,7 @@ pytest-cov==6.0.0
pytest-mock==3.14.0
# Code quality
- flake8==7.1.1
- black==25.12.0
+ ruff==0.1.6
mypy==1.19.0
# Coverage reporting


@@ -1,12 +1,11 @@
"""Unit tests for Mastodon RSS Bot"""
import unittest
- from unittest.mock import Mock, patch, mock_open, MagicMock
+ from unittest.mock import Mock, patch
import tempfile
import os
from pathlib import Path
from bot import MastodonRSSBot
import feedparser
class TestMastodonRSSBot(unittest.TestCase):
@@ -92,9 +91,7 @@ class TestMastodonRSSBot(unittest.TestCase):
bot = MastodonRSSBot(**self.test_config)
# Raise exception once, then KeyboardInterrupt to exit loop
- bot.process_new_entries = Mock(
-     side_effect=[Exception("Network Error"), KeyboardInterrupt]
- )
+ bot.process_new_entries = Mock(side_effect=[Exception("Network Error"), KeyboardInterrupt])
bot.run()
@@ -106,9 +103,7 @@ class TestMastodonRSSBot(unittest.TestCase):
def test_process_feed_new_entry(self, mock_mastodon, mock_parse):
"""Test processing feed with a new entry"""
mock_feed = Mock()
- mock_feed.entries = [
-     {"title": "New", "link": "http://new.com", "description": "desc"}
- ]
+ mock_feed.entries = [{"title": "New", "link": "http://new.com", "description": "desc"}]
mock_parse.return_value = mock_feed
# Mock instance
@@ -163,14 +158,9 @@ class TestMastodonRSSBot(unittest.TestCase):
bot = MastodonRSSBot(**self.test_config)
bot.feed_urls = ["http://feed1.com", "http://feed2.com"]
- with patch.object(
-     bot, "load_processed_entries", return_value=set()
- ), patch.object(
+ with patch.object(bot, "load_processed_entries", return_value=set()), patch.object(
bot, "process_feed", side_effect=[1, 2]
- ) as mock_process, patch.object(
-     bot, "save_processed_entries"
- ) as mock_save:
+ ) as mock_process, patch.object(bot, "save_processed_entries") as mock_save:
total = bot.process_new_entries()
self.assertEqual(total, 3)
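The chained `patch.object()` call above is awkward for any line-length-driven formatter. A minimal sketch of an alternative, not part of this commit and using a hypothetical `Service` class: Python 3.10's parenthesized context managers, which the new `requires-python = ">=3.10"` constraint already allows, tend to wrap more cleanly.

```python
# Sketch only (hypothetical Service class, not code from this repository):
# parenthesized context managers keep a chain of patch.object() calls readable
# without the wrapping seen in the hunk above.
from unittest.mock import patch


class Service:
    def load_processed_entries(self) -> set:
        return set()

    def process_feed(self, url: str) -> int:
        return 0

    def save_processed_entries(self) -> None:
        pass

    def process_new_entries(self) -> int:
        self.load_processed_entries()  # normally used to skip already-seen entries
        total = sum(self.process_feed(url) for url in ["http://feed1.com", "http://feed2.com"])
        self.save_processed_entries()
        return total


def test_process_new_entries() -> None:
    svc = Service()
    with (
        patch.object(Service, "load_processed_entries", return_value=set()),
        patch.object(Service, "process_feed", side_effect=[1, 2]) as mock_process,
        patch.object(Service, "save_processed_entries") as mock_save,
    ):
        assert svc.process_new_entries() == 3
    assert mock_process.call_count == 2
    mock_save.assert_called_once()
```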


@@ -65,9 +65,7 @@ class TestRSSFeedIntegration(unittest.TestCase):
"""Test parsing Atom feeds"""
# Create mock Atom feed object
mock_feed = Mock()
- mock_feed.entries = [
-     {"title": "Atom Article", "link": "https://example.com/atom1"}
- ]
+ mock_feed.entries = [{"title": "Atom Article", "link": "https://example.com/atom1"}]
mock_parse.return_value = mock_feed
mock_instance = Mock()