def main(event, _):
    """
    Lambda entry point: collect today's articles from the configured
    security feeds and persist their links.

    Args:
        event: The triggering Lambda event payload (logged only).
        _: Unused Lambda context object.

    Returns:
        The summary dict produced by publish_message_to_table().
    """
    logging.info("Event: %s", event)

    # Pull entries from every configured feed and merge them into one list.
    all_articles = fetch_bleeping_computer_rss() + fetch_hacker_news_rss()

    # Keep only the articles published today.
    latest_articles = get_latest_article_with_timezone(all_articles)
    logging.info("Latest articles: %s", latest_articles)

    # Fix: the previous version filtered the links down to
    # "bleepingcomputer.com" only, silently discarding every Hacker News
    # article and turning fetch_hacker_news_rss() into a wasted network
    # call. Publish the links of *all* of today's articles instead.
    links = [article["link"] for article in latest_articles]
    return publish_message_to_table(links)
3143def get_latest_article_with_timezone (articles , timezone_str = "UTC" ):
@@ -37,48 +49,58 @@ def get_latest_article_with_timezone(articles, timezone_str="UTC"):
3749 todays_articles = []
3850 for article in articles :
3951 date_str = article ["published" ]
40- parsed_date = datetime .strptime (date_str , "%a, %d %b %Y %H:%M:%S %z" )
41- formatted_date = parsed_date .isoformat ()
42- pub_date = datetime .fromisoformat (formatted_date .replace ("Z" , "+00:00" ))
43- pub_date = pub_date .astimezone (tz )
52+ # Handle 'GMT' timezone suffix
53+ if date_str .endswith (' GMT' ):
54+ date_str = date_str .replace (' GMT' , ' +0000' )
55+ try :
56+ parsed_date = datetime .strptime (date_str , "%a, %d %b %Y %H:%M:%S %z" )
57+ except ValueError as e :
58+ logging .error ("Error parsing date '%s': %s" , date_str , e )
59+ continue
60+ pub_date = parsed_date .astimezone (tz )
4461 if pub_date .date () == today :
4562 todays_articles .append (article )
46-
4763 return todays_articles
4864
4965
def fetch_bleeping_computer_rss(feed_url="https://www.bleepingcomputer.com/feed/"):
    """Fetch articles from the Bleeping Computer RSS feed.

    Args:
        feed_url: URL of the RSS feed to parse.

    Returns:
        A list of dicts, one per feed entry, with the keys
        "title", "link", "published" and "summary".

    Raises:
        ValueError: If feedparser flags the feed as malformed (bozo).
    """
    feed = feedparser.parse(feed_url)
    if feed.bozo:
        raise ValueError(f"Error parsing feed: {feed.bozo_exception}")
    # Build the article records in a single comprehension instead of a
    # manual append loop (same fields, same order).
    return [
        {
            "title": entry.title,
            "link": entry.link,
            "published": entry.published,
            "summary": entry.summary,
        }
        for entry in feed.entries
    ]
7180
def fetch_hacker_news_rss(feed_url="https://feeds.feedburner.com/TheHackersNews"):
    """Fetch articles from The Hacker News RSS feed.

    Args:
        feed_url: URL of the RSS feed to parse.

    Returns:
        A list of dicts, one per feed entry, with the keys
        "title", "link", "published" and "summary".

    Raises:
        ValueError: If feedparser flags the feed as malformed (bozo).
    """
    feed = feedparser.parse(feed_url)
    if feed.bozo:
        raise ValueError(f"Error parsing feed: {feed.bozo_exception}")
    # Same record shape and construction style as
    # fetch_bleeping_computer_rss, so downstream code can merge the lists.
    return [
        {
            "title": entry.title,
            "link": entry.link,
            "published": entry.published,
            "summary": entry.summary,
        }
        for entry in feed.entries
    ]
7296
7397def publish_message_to_table (links : str ):
7498 """
75- Sends a message to the SQS queue .
99+ Sends a message to the DynamoDB table .
76100 Returns a dictionary with the message and the message ID.
77101 """
78- logging .info ("Sending message to SQS queue" )
79-
102+ logging .info ("Sending message to DynamoDB table" )
80103 dynamodb_client = boto3 .client ("dynamodb" )
81-
82104 for link in links :
83105 logging .info ("Link: %s" , link )
84106 response = dynamodb_client .put_item (
@@ -92,5 +114,4 @@ def publish_message_to_table(links: str):
92114 },
93115 )
94116 logging .info ("Response: %s" , response )
95-
96117 return {"message" : "Message has been logged to DynamoDB!" , "links" : links }