 # Default to 10 minutes (600 seconds) if not specified
 CACHE_TTL = int(getattr(settings, "CACHE_TTL_MINUTES", 10)) * 60
 
+CACHE_TTL = int(getattr(settings, "CACHE_TTL_MINUTES", 10)) * 60
+
 # Create a global cache to store timestamps
 _cache_timestamps = {}
-
+@cached(ttl=CACHE_TTL, cache=SimpleMemoryCache)
 # Custom caching implementation to track cache status
 async def fetch_and_parse_rate_data(url: str, timeout: int = 10) -> Tuple[Dict[str, Any], bool, datetime]:
     """
@@ -42,38 +44,21 @@ async def fetch_and_parse_rate_data(url: str, timeout: int = 10) -> Tuple[Dict[s
     Raises:
         Exception: If there's an error during fetching or parsing the data
     """
-    cache_key = f"exchange_rate:{url}"
-    cache = SimpleMemoryCache()
+
     is_cached = False
     now = datetime.now()
 
-    # Check if data is in cache
-    cached_data = await cache.get(cache_key)
-
-    if cached_data is not None:
-        # Data is in cache
-        is_cached = True
-        # Get expiration time from our timestamps dictionary
-        expiration_time = _cache_timestamps.get(cache_key, now + timedelta(seconds=CACHE_TTL))
-        logger.info(f"Using cached exchange rate data. Next update at {expiration_time}")
-        return cached_data, is_cached, expiration_time
-
-    # If not in cache, fetch new data
-    next_update = now + timedelta(seconds=CACHE_TTL)
-    _cache_timestamps[cache_key] = next_update
-    logger.info(f"Fetching fresh exchange rate data. Will cache until {next_update}")
+
     try:
         async with httpx.AsyncClient() as client:
             logger.info(f"Fetching exchange rate data from {url}")
             response = await client.get(url, timeout=timeout)
             response.raise_for_status()  # Raise exception for HTTP errors
 
-            # Parse the HTML content with lxml for better performance
+
             soup = BeautifulSoup(response.text, 'lxml')
 
-            # Extract the exchange rate data
-            # Note: The actual extraction logic will depend on the specific website structure
-            # This is a placeholder example that should be adapted to the target website
+
             exchange_data = _parse_exchange_rate_data(soup)
 
             if not exchange_data:
@@ -82,10 +67,9 @@ async def fetch_and_parse_rate_data(url: str, timeout: int = 10) -> Tuple[Dict[s
 
             logger.info(f"Successfully fetched and parsed exchange rate data: {exchange_data}")
 
-            # Store in cache
-            await cache.set(cache_key, exchange_data, ttl=CACHE_TTL)
 
-            return exchange_data, is_cached, next_update
+
+            return exchange_data, None, None
 
     except httpx.TimeoutException:
         logger.error(f"Timeout error while fetching data from {url}")