Diffstat (limited to 'services/news-collector/src')
-rw-r--r--  services/news-collector/src/news_collector/collectors/fear_greed.py     5
-rw-r--r--  services/news-collector/src/news_collector/collectors/fed.py            6
-rw-r--r--  services/news-collector/src/news_collector/collectors/finnhub.py        4
-rw-r--r--  services/news-collector/src/news_collector/collectors/reddit.py         4
-rw-r--r--  services/news-collector/src/news_collector/collectors/rss.py            6
-rw-r--r--  services/news-collector/src/news_collector/collectors/sec_edgar.py     10
-rw-r--r--  services/news-collector/src/news_collector/collectors/truth_social.py   4
-rw-r--r--  services/news-collector/src/news_collector/main.py                     44
8 files changed, 39 insertions, 44 deletions
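
Taken together, the hunks below are a Python 3.10+/3.11+ modernization pass: Optional[X] annotations become X | None (PEP 604), timezone.utc becomes the UTC alias from datetime, import blocks are regrouped and alphabetized, the now-redundant asyncio.TimeoutError is dropped from except tuples, and a task-spawning loop is rewritten as a list comprehension.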
diff --git a/services/news-collector/src/news_collector/collectors/fear_greed.py b/services/news-collector/src/news_collector/collectors/fear_greed.py
index f79f716..42e8f88 100644
--- a/services/news-collector/src/news_collector/collectors/fear_greed.py
+++ b/services/news-collector/src/news_collector/collectors/fear_greed.py
@@ -2,7 +2,6 @@
import logging
from dataclasses import dataclass
-from typing import Optional
import aiohttp
@@ -26,7 +25,7 @@ class FearGreedCollector(BaseCollector):
async def is_available(self) -> bool:
return True
- async def _fetch_index(self) -> Optional[dict]:
+ async def _fetch_index(self) -> dict | None:
headers = {"User-Agent": "Mozilla/5.0"}
try:
async with aiohttp.ClientSession() as session:
@@ -50,7 +49,7 @@ class FearGreedCollector(BaseCollector):
return "Greed"
return "Extreme Greed"
- async def collect(self) -> Optional[FearGreedResult]:
+ async def collect(self) -> FearGreedResult | None:
data = await self._fetch_index()
if data is None:
return None
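
A quick illustration of the annotation change above (a standalone sketch, not code from this repo): PEP 604, available since Python 3.10, lets the builtin | operator express optional types, which is why the typing.Optional import could be dropped.

# PEP 604 (Python 3.10+): the two annotations below are equivalent.
from typing import Optional

def old_style() -> Optional[dict]:  # needs the typing import
    return None

def new_style() -> dict | None:  # builtin syntax, no import
    return None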
diff --git a/services/news-collector/src/news_collector/collectors/fed.py b/services/news-collector/src/news_collector/collectors/fed.py
index fce4842..52128e5 100644
--- a/services/news-collector/src/news_collector/collectors/fed.py
+++ b/services/news-collector/src/news_collector/collectors/fed.py
@@ -3,7 +3,7 @@
import asyncio
import logging
from calendar import timegm
-from datetime import datetime, timezone
+from datetime import UTC, datetime
import feedparser
from nltk.sentiment.vader import SentimentIntensityAnalyzer
@@ -76,10 +76,10 @@ class FedCollector(BaseCollector):
if published_parsed:
try:
ts = timegm(published_parsed)
- return datetime.fromtimestamp(ts, tz=timezone.utc)
+ return datetime.fromtimestamp(ts, tz=UTC)
except Exception:
pass
- return datetime.now(timezone.utc)
+ return datetime.now(UTC)
async def collect(self) -> list[NewsItem]:
try:
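
Context for the timezone change (a minimal sketch, assuming Python 3.11+): datetime.UTC was added in 3.11 as an alias of datetime.timezone.utc, so the two spellings behave identically.

from datetime import UTC, datetime, timezone

assert UTC is timezone.utc  # UTC is an alias, added in Python 3.11
now = datetime.now(UTC)  # timezone-aware current time
epoch = datetime.fromtimestamp(0, tz=UTC)  # 1970-01-01 00:00:00+00:00

The same substitution appears in the finnhub, reddit, rss, sec_edgar, truth_social, and main.py hunks below.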
diff --git a/services/news-collector/src/news_collector/collectors/finnhub.py b/services/news-collector/src/news_collector/collectors/finnhub.py
index 13e3602..67cb455 100644
--- a/services/news-collector/src/news_collector/collectors/finnhub.py
+++ b/services/news-collector/src/news_collector/collectors/finnhub.py
@@ -1,7 +1,7 @@
"""Finnhub news collector with VADER sentiment analysis."""
import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
import aiohttp
from nltk.sentiment.vader import SentimentIntensityAnalyzer
@@ -64,7 +64,7 @@ class FinnhubCollector(BaseCollector):
sentiment = sentiment_scores["compound"]
ts = article.get("datetime", 0)
- published_at = datetime.fromtimestamp(ts, tz=timezone.utc)
+ published_at = datetime.fromtimestamp(ts, tz=UTC)
related = article.get("related", "")
symbols = [t.strip() for t in related.split(",") if t.strip()] if related else []
diff --git a/services/news-collector/src/news_collector/collectors/reddit.py b/services/news-collector/src/news_collector/collectors/reddit.py
index 226a2f9..4e9d6f5 100644
--- a/services/news-collector/src/news_collector/collectors/reddit.py
+++ b/services/news-collector/src/news_collector/collectors/reddit.py
@@ -2,7 +2,7 @@
import logging
import re
-from datetime import datetime, timezone
+from datetime import UTC, datetime
import aiohttp
from nltk.sentiment.vader import SentimentIntensityAnalyzer
@@ -78,7 +78,7 @@ class RedditCollector(BaseCollector):
symbols = list(dict.fromkeys(_TICKER_PATTERN.findall(combined)))
created_utc = post_data.get("created_utc", 0)
- published_at = datetime.fromtimestamp(created_utc, tz=timezone.utc)
+ published_at = datetime.fromtimestamp(created_utc, tz=UTC)
items.append(
NewsItem(
diff --git a/services/news-collector/src/news_collector/collectors/rss.py b/services/news-collector/src/news_collector/collectors/rss.py
index ddf8503..bca0e9f 100644
--- a/services/news-collector/src/news_collector/collectors/rss.py
+++ b/services/news-collector/src/news_collector/collectors/rss.py
@@ -3,7 +3,7 @@
import asyncio
import logging
import re
-from datetime import datetime, timezone
+from datetime import UTC, datetime
from time import mktime
import feedparser
@@ -56,10 +56,10 @@ class RSSCollector(BaseCollector):
if parsed_time:
try:
ts = mktime(parsed_time)
- return datetime.fromtimestamp(ts, tz=timezone.utc)
+ return datetime.fromtimestamp(ts, tz=UTC)
except Exception:
pass
- return datetime.now(timezone.utc)
+ return datetime.now(UTC)
async def collect(self) -> list[NewsItem]:
try:
diff --git a/services/news-collector/src/news_collector/collectors/sec_edgar.py b/services/news-collector/src/news_collector/collectors/sec_edgar.py
index ca1d070..d88518f 100644
--- a/services/news-collector/src/news_collector/collectors/sec_edgar.py
+++ b/services/news-collector/src/news_collector/collectors/sec_edgar.py
@@ -1,13 +1,13 @@
"""SEC EDGAR filing collector (free, no API key required)."""
import logging
-from datetime import datetime, timezone
+from datetime import UTC, datetime
import aiohttp
from nltk.sentiment.vader import SentimentIntensityAnalyzer
-from shared.models import NewsCategory, NewsItem
from news_collector.collectors.base import BaseCollector
+from shared.models import NewsCategory, NewsItem
logger = logging.getLogger(__name__)
@@ -58,7 +58,7 @@ class SecEdgarCollector(BaseCollector):
async def collect(self) -> list[NewsItem]:
filings_data = await self._fetch_recent_filings()
items = []
- today = datetime.now(timezone.utc).strftime("%Y-%m-%d")
+ today = datetime.now(UTC).strftime("%Y-%m-%d")
for company_data in filings_data:
tickers = [t["ticker"] for t in company_data.get("tickers", [])]
@@ -87,9 +87,7 @@ class SecEdgarCollector(BaseCollector):
headline=headline,
summary=desc,
url=f"https://www.sec.gov/cgi-bin/browse-edgar?action=getcompany&accession={accession}",
- published_at=datetime.strptime(filing_date, "%Y-%m-%d").replace(
- tzinfo=timezone.utc
- ),
+ published_at=datetime.strptime(filing_date, "%Y-%m-%d").replace(tzinfo=UTC),
symbols=tickers,
sentiment=self._vader.polarity_scores(headline)["compound"],
category=NewsCategory.FILING,
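
The two-line swap above (and the larger regrouping in main.py below) matches the isort/Ruff convention of alphabetizing imports within each group; assuming both news_collector and shared are configured as first-party packages, the sorted block reads:

# First-party imports, alphabetized (isort / Ruff rule I001).
from news_collector.collectors.base import BaseCollector
from shared.models import NewsCategory, NewsItem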
diff --git a/services/news-collector/src/news_collector/collectors/truth_social.py b/services/news-collector/src/news_collector/collectors/truth_social.py
index 33ebc86..e2acd88 100644
--- a/services/news-collector/src/news_collector/collectors/truth_social.py
+++ b/services/news-collector/src/news_collector/collectors/truth_social.py
@@ -2,7 +2,7 @@
import logging
import re
-from datetime import datetime, timezone
+from datetime import UTC, datetime
import aiohttp
from nltk.sentiment.vader import SentimentIntensityAnalyzer
@@ -67,7 +67,7 @@ class TruthSocialCollector(BaseCollector):
try:
published_at = datetime.fromisoformat(created_at_str.replace("Z", "+00:00"))
except Exception:
- published_at = datetime.now(timezone.utc)
+ published_at = datetime.now(UTC)
items.append(
NewsItem(
diff --git a/services/news-collector/src/news_collector/main.py b/services/news-collector/src/news_collector/main.py
index af0cd20..7265f00 100644
--- a/services/news-collector/src/news_collector/main.py
+++ b/services/news-collector/src/news_collector/main.py
@@ -1,10 +1,18 @@
"""News Collector Service — fetches news from multiple sources and aggregates sentiment."""
import asyncio
-from datetime import datetime, timezone
+from datetime import UTC, datetime
import aiohttp
+from news_collector.collectors.fear_greed import FearGreedCollector
+from news_collector.collectors.fed import FedCollector
+from news_collector.collectors.finnhub import FinnhubCollector
+from news_collector.collectors.reddit import RedditCollector
+from news_collector.collectors.rss import RSSCollector
+from news_collector.collectors.sec_edgar import SecEdgarCollector
+from news_collector.collectors.truth_social import TruthSocialCollector
+from news_collector.config import NewsCollectorConfig
from shared.broker import RedisBroker
from shared.db import Database
from shared.events import NewsEvent
@@ -13,19 +21,10 @@ from shared.logging import setup_logging
from shared.metrics import ServiceMetrics
from shared.models import NewsItem
from shared.notifier import TelegramNotifier
-from shared.sentiment_models import MarketSentiment
from shared.sentiment import SentimentAggregator
+from shared.sentiment_models import MarketSentiment
from shared.shutdown import GracefulShutdown
-from news_collector.config import NewsCollectorConfig
-from news_collector.collectors.finnhub import FinnhubCollector
-from news_collector.collectors.rss import RSSCollector
-from news_collector.collectors.sec_edgar import SecEdgarCollector
-from news_collector.collectors.truth_social import TruthSocialCollector
-from news_collector.collectors.reddit import RedditCollector
-from news_collector.collectors.fear_greed import FearGreedCollector
-from news_collector.collectors.fed import FedCollector
-
# Health check port: base + 4
HEALTH_PORT_OFFSET = 4
@@ -56,7 +55,7 @@ async def run_collector_loop(collector, db: Database, broker: RedisBroker, log)
collector=collector.name,
count=count,
)
- except (aiohttp.ClientError, ConnectionError, TimeoutError, asyncio.TimeoutError) as exc:
+ except (aiohttp.ClientError, ConnectionError, TimeoutError) as exc:
log.warning(
"collector_network_error",
collector=collector.name,
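
Why dropping asyncio.TimeoutError is safe here (assuming Python 3.11+, which the UTC alias above already requires): since 3.11, asyncio.TimeoutError is an alias of the builtin TimeoutError, so the old tuple listed the same class twice. A standalone sketch:

import asyncio

# Since Python 3.11, asyncio.TimeoutError IS the builtin TimeoutError.
assert asyncio.TimeoutError is TimeoutError

try:
    ...
except (ConnectionError, TimeoutError):  # still catches asyncio timeouts
    pass

The same tuple is trimmed again in the two hunks below.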
@@ -83,7 +82,7 @@ async def run_fear_greed_loop(collector: FearGreedCollector, db: Database, log)
vix=None,
fed_stance="neutral",
market_regime=_determine_regime(result.fear_greed, None),
- updated_at=datetime.now(timezone.utc),
+ updated_at=datetime.now(UTC),
)
await db.upsert_market_sentiment(ms)
log.info(
@@ -91,7 +90,7 @@ async def run_fear_greed_loop(collector: FearGreedCollector, db: Database, log)
value=result.fear_greed,
label=result.fear_greed_label,
)
- except (aiohttp.ClientError, ConnectionError, TimeoutError, asyncio.TimeoutError) as exc:
+ except (aiohttp.ClientError, ConnectionError, TimeoutError) as exc:
log.warning("fear_greed_network_error", error=str(exc))
except (ValueError, KeyError, TypeError) as exc:
log.warning("fear_greed_parse_error", error=str(exc))
@@ -104,13 +103,13 @@ async def run_aggregator_loop(db: Database, interval: int, log) -> None:
while True:
await asyncio.sleep(interval)
try:
- now = datetime.now(timezone.utc)
+ now = datetime.now(UTC)
news_items = await db.get_recent_news(hours=24)
scores = aggregator.aggregate(news_items, now)
for score in scores.values():
await db.upsert_symbol_score(score)
log.info("aggregation_complete", symbols=len(scores))
- except (ConnectionError, TimeoutError, asyncio.TimeoutError) as exc:
+ except (ConnectionError, TimeoutError) as exc:
log.warning("aggregator_network_error", error=str(exc))
except (ValueError, KeyError, TypeError) as exc:
log.warning("aggregator_parse_error", error=str(exc))
@@ -167,14 +166,13 @@ async def run() -> None:
)
try:
- tasks = []
- for collector in news_collectors:
- tasks.append(
- asyncio.create_task(
- run_collector_loop(collector, db, broker, log),
- name=f"collector-{collector.name}",
- )
+ tasks = [
+ asyncio.create_task(
+ run_collector_loop(collector, db, broker, log),
+ name=f"collector-{collector.name}",
)
+ for collector in news_collectors
+ ]
tasks.append(
asyncio.create_task(
run_fear_greed_loop(fear_greed, db, log),
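
The final hunk replaces an append loop with a list comprehension that builds one named task per collector. A minimal standalone sketch of the same pattern (fake_loop and the collector names are placeholders, not this service's API):

import asyncio

async def fake_loop(name: str) -> None:  # stand-in for run_collector_loop
    await asyncio.sleep(0)

async def main() -> None:
    names = ["finnhub", "rss", "reddit"]
    # One named task per collector, built in a single comprehension.
    tasks = [
        asyncio.create_task(fake_loop(n), name=f"collector-{n}")
        for n in names
    ]
    await asyncio.gather(*tasks)

asyncio.run(main())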