66import  aiofiles 
77import  re 
88import  math 
9- from  datetime  import  datetime 
9+ from  datetime  import  datetime , timedelta 
10+ from  homeassistant .core  import  HomeAssistant 
11+ from  homeassistant .components .http .auth  import  async_sign_path 
1012
11- SCRIPT_DIR  =  os .path .dirname (os .path .abspath (__file__ ))
12- CACHE_FOLDER  =  os .path .join (SCRIPT_DIR , "images_cache" )
13- os .makedirs (CACHE_FOLDER , exist_ok = True )
13+ from  .const  import  TIMEOUT_MINUTES 
1414
1515import  logging 
1616_LOGGER  =  logging .getLogger (__name__ )
@@ -30,34 +30,17 @@ def parse_library(root):
3030
3131    return  output 
3232
def get_image_filename(url):
    """Derive a stable, unique cache filename for *url*.

    The name is the md5 hex digest of the full URL plus the extension taken
    from the URL's path; ".jpg" is used when the path has no extension.
    """
    path = urlparse(url).path
    _, extension = os.path.splitext(path)
    digest = hashlib.md5(url.encode()).hexdigest()
    # Fall back to .jpg so every cached file has some extension.
    return digest + (extension or ".jpg")
40- 
async def download_image(url):
    """Fetch *url* into the image cache without blocking the event loop.

    Returns the cache filename on an HTTP 200 response, or None otherwise.
    """
    cache_name = get_image_filename(url)
    destination = os.path.join(CACHE_FOLDER, cache_name)

    async with aiohttp.ClientSession() as session:
        async with session.get(url) as response:
            if response.status != 200:
                return None
            payload = await response.read()
            # aiofiles keeps the disk write off the event loop.
            async with aiofiles.open(destination, "wb") as handle:
                await handle.write(payload)
            return cache_name
53- 
def cleanup_old_images(valid_filenames):
    """Prune cached images whose names are no longer referenced.

    Anything in CACHE_FOLDER not listed in *valid_filenames* is deleted.
    """
    for entry in os.listdir(CACHE_FOLDER):
        if entry in valid_filenames:
            continue
        os.remove(os.path.join(CACHE_FOLDER, entry))
59- 
60- async  def  parse_data (data , max , base_url , token , identifier , section_key , images_base_url , is_all  =  False ):
# Compiled once at import time; extract_metadata_and_type is called for every
# thumb/art path inside parse_data's per-item loop.
_ART_PATH_RE = re.compile(r"/library/metadata/(\d+)/(thumb|art)/(\d+)")


def extract_metadata_and_type(path):
    """Extract Plex artwork identifiers from an image path.

    Expects paths shaped like ``/library/metadata/<metadata_id>/<thumb|art>/<art_id>``
    (the form Plex uses for poster/fanart URLs).

    Args:
        path: The image path (or URL containing it); may be None or empty
            when an item carries no artwork.

    Returns:
        A ``(metadata_id, art_type, art_id)`` tuple of strings, or ``None``
        when *path* is falsy or does not match. Callers truth-test the
        result before indexing, so the miss case is returned explicitly.
    """
    if not path:
        # Original implementation raised TypeError on None; treat "no
        # artwork" the same as "unrecognized path".
        return None
    match = _ART_PATH_RE.search(path)
    if match is None:
        return None
    return match.group(1), match.group(2), match.group(3)
42+ 
43+ def  parse_data (hass : HomeAssistant , data , max , base_url , token , identifier , section_key , images_base_url , is_all  =  False ):
6144    if  is_all :
6245        sorted_data  =  []
6346        for  k  in  data .keys ():
@@ -109,16 +92,12 @@ async def parse_data(data, max, base_url, token, identifier, section_key, images
10992        data_output ['trailer' ] =  item .get ('trailer' )
11093
11194
112-         thumb_filename  =  await  download_image (f'{ base_url } { thumb } { token }  )
113-         if  thumb_filename :
114-             valid_images .add (thumb_filename )
115-         data_output ["poster" ] =  (f'{ images_base_url } { thumb_filename }  ) if  thumb_filename  else  "" 
95+         thumb_IDs  =  extract_metadata_and_type (thumb )
96+         data_output ["poster" ] =  async_sign_path (hass , f'{ images_base_url } { thumb_IDs [0 ]} { thumb_IDs [2 ]}  , timedelta (minutes = TIMEOUT_MINUTES )) if  thumb_IDs  else  "" 
11697
11798
118-         art_filename  =  await  download_image (f'{ base_url } { art } { token }  )
119-         if  art_filename :
120-             valid_images .add (art_filename )
121-         data_output ["fanart" ] =  (f'{ images_base_url } { art_filename }  ) if  art_filename  else  "" 
99+         art_IDs  =  extract_metadata_and_type (art )
100+         data_output ["fanart" ] =  async_sign_path (hass , f'{ images_base_url } { art_IDs [0 ]} { art_IDs [2 ]}  , timedelta (minutes = TIMEOUT_MINUTES )) if  art_IDs  else  "" 
122101
123102
124103        data_output ["deep_link" ] =  deep_link  if  identifier  else  None 
0 commit comments