diff options
| author | TheSiahxyz <164138827+TheSiahxyz@users.noreply.github.com> | 2026-04-02 15:54:55 +0900 |
|---|---|---|
| committer | TheSiahxyz <164138827+TheSiahxyz@users.noreply.github.com> | 2026-04-02 15:54:55 +0900 |
| commit | bf4afbc0a3cc4e847ef01840365fd6a6ae9c142f (patch) | |
| tree | c8634b3b21534f550e2d255d98c4a068a1b567d0 /services/news-collector/src/news_collector/main.py | |
| parent | ec8b6fea5a4a710df4b2ae18f3f399d165c8ffd4 (diff) | |
style: auto-fix lint violations from enhanced ruff rules
Diffstat (limited to 'services/news-collector/src/news_collector/main.py')
| -rw-r--r-- | services/news-collector/src/news_collector/main.py | 44 |
1 file changed, 21 insertions, 23 deletions
diff --git a/services/news-collector/src/news_collector/main.py b/services/news-collector/src/news_collector/main.py index af0cd20..7265f00 100644 --- a/services/news-collector/src/news_collector/main.py +++ b/services/news-collector/src/news_collector/main.py @@ -1,10 +1,18 @@ """News Collector Service — fetches news from multiple sources and aggregates sentiment.""" import asyncio -from datetime import datetime, timezone +from datetime import UTC, datetime import aiohttp +from news_collector.collectors.fear_greed import FearGreedCollector +from news_collector.collectors.fed import FedCollector +from news_collector.collectors.finnhub import FinnhubCollector +from news_collector.collectors.reddit import RedditCollector +from news_collector.collectors.rss import RSSCollector +from news_collector.collectors.sec_edgar import SecEdgarCollector +from news_collector.collectors.truth_social import TruthSocialCollector +from news_collector.config import NewsCollectorConfig from shared.broker import RedisBroker from shared.db import Database from shared.events import NewsEvent @@ -13,19 +21,10 @@ from shared.logging import setup_logging from shared.metrics import ServiceMetrics from shared.models import NewsItem from shared.notifier import TelegramNotifier -from shared.sentiment_models import MarketSentiment from shared.sentiment import SentimentAggregator +from shared.sentiment_models import MarketSentiment from shared.shutdown import GracefulShutdown -from news_collector.config import NewsCollectorConfig -from news_collector.collectors.finnhub import FinnhubCollector -from news_collector.collectors.rss import RSSCollector -from news_collector.collectors.sec_edgar import SecEdgarCollector -from news_collector.collectors.truth_social import TruthSocialCollector -from news_collector.collectors.reddit import RedditCollector -from news_collector.collectors.fear_greed import FearGreedCollector -from news_collector.collectors.fed import FedCollector - # Health check port: base + 
4 HEALTH_PORT_OFFSET = 4 @@ -56,7 +55,7 @@ async def run_collector_loop(collector, db: Database, broker: RedisBroker, log) collector=collector.name, count=count, ) - except (aiohttp.ClientError, ConnectionError, TimeoutError, asyncio.TimeoutError) as exc: + except (aiohttp.ClientError, ConnectionError, TimeoutError) as exc: log.warning( "collector_network_error", collector=collector.name, @@ -83,7 +82,7 @@ async def run_fear_greed_loop(collector: FearGreedCollector, db: Database, log) vix=None, fed_stance="neutral", market_regime=_determine_regime(result.fear_greed, None), - updated_at=datetime.now(timezone.utc), + updated_at=datetime.now(UTC), ) await db.upsert_market_sentiment(ms) log.info( @@ -91,7 +90,7 @@ async def run_fear_greed_loop(collector: FearGreedCollector, db: Database, log) value=result.fear_greed, label=result.fear_greed_label, ) - except (aiohttp.ClientError, ConnectionError, TimeoutError, asyncio.TimeoutError) as exc: + except (aiohttp.ClientError, ConnectionError, TimeoutError) as exc: log.warning("fear_greed_network_error", error=str(exc)) except (ValueError, KeyError, TypeError) as exc: log.warning("fear_greed_parse_error", error=str(exc)) @@ -104,13 +103,13 @@ async def run_aggregator_loop(db: Database, interval: int, log) -> None: while True: await asyncio.sleep(interval) try: - now = datetime.now(timezone.utc) + now = datetime.now(UTC) news_items = await db.get_recent_news(hours=24) scores = aggregator.aggregate(news_items, now) for score in scores.values(): await db.upsert_symbol_score(score) log.info("aggregation_complete", symbols=len(scores)) - except (ConnectionError, TimeoutError, asyncio.TimeoutError) as exc: + except (ConnectionError, TimeoutError) as exc: log.warning("aggregator_network_error", error=str(exc)) except (ValueError, KeyError, TypeError) as exc: log.warning("aggregator_parse_error", error=str(exc)) @@ -167,14 +166,13 @@ async def run() -> None: ) try: - tasks = [] - for collector in news_collectors: - tasks.append( - 
asyncio.create_task( - run_collector_loop(collector, db, broker, log), - name=f"collector-{collector.name}", - ) + tasks = [ + asyncio.create_task( + run_collector_loop(collector, db, broker, log), + name=f"collector-{collector.name}", ) + for collector in news_collectors + ] tasks.append( asyncio.create_task( run_fear_greed_loop(fear_greed, db, log), |
