
Commit c636f97

Update exchange_rate.py
1 parent cbe590b

File tree: 1 file changed, +9 −25 lines


app/services/exchange_rate.py

Lines changed: 9 additions & 25 deletions
@@ -20,9 +20,11 @@
 # Default to 60 minutes (3600 seconds) if not specified
 CACHE_TTL = int(getattr(settings, "CACHE_TTL_MINUTES", 10)) * 60
 
+CACHE_TTL = int(getattr(settings, "CACHE_TTL_MINUTES", 10)) * 60
+
 # Create a global cache to store timestamps
 _cache_timestamps = {}
-
+@cached(ttl=CACHE_TTL, cache=SimpleMemoryCache)
 # Custom caching implementation to track cache status
 async def fetch_and_parse_rate_data(url: str, timeout: int = 10) -> Tuple[Dict[str, Any], bool, datetime]:
     """
@@ -42,38 +44,21 @@ async def fetch_and_parse_rate_data(url: str, timeout: int = 10) -> Tuple[Dict[s
     Raises:
         Exception: If there's an error during fetching or parsing the data
     """
-    cache_key = f"exchange_rate:{url}"
-    cache = SimpleMemoryCache()
+
     is_cached = False
     now = datetime.now()
 
-    # Check if data is in cache
-    cached_data = await cache.get(cache_key)
-
-    if cached_data is not None:
-        # Data is in cache
-        is_cached = True
-        # Get expiration time from our timestamps dictionary
-        expiration_time = _cache_timestamps.get(cache_key, now + timedelta(seconds=CACHE_TTL))
-        logger.info(f"Using cached exchange rate data. Next update at {expiration_time}")
-        return cached_data, is_cached, expiration_time
-
-    # If not in cache, fetch new data
-    next_update = now + timedelta(seconds=CACHE_TTL)
-    _cache_timestamps[cache_key] = next_update
-    logger.info(f"Fetching fresh exchange rate data. Will cache until {next_update}")
+
     try:
         async with httpx.AsyncClient() as client:
             logger.info(f"Fetching exchange rate data from {url}")
             response = await client.get(url, timeout=timeout)
             response.raise_for_status()  # Raise exception for HTTP errors
 
-            # Parse the HTML content with lxml for better performance
+
             soup = BeautifulSoup(response.text, 'lxml')
 
-            # Extract the exchange rate data
-            # Note: The actual extraction logic will depend on the specific website structure
-            # This is a placeholder example that should be adapted to the target website
+
             exchange_data = _parse_exchange_rate_data(soup)
 
             if not exchange_data:
@@ -82,10 +67,9 @@ async def fetch_and_parse_rate_data(url: str, timeout: int = 10) -> Tuple[Dict[s
 
             logger.info(f"Successfully fetched and parsed exchange rate data: {exchange_data}")
 
-            # Store in cache
-            await cache.set(cache_key, exchange_data, ttl=CACHE_TTL)
 
-            return exchange_data, is_cached, next_update
+
+            return exchange_data, None, None
 
     except httpx.TimeoutException:
         logger.error(f"Timeout error while fetching data from {url}")
