@@ -65,27 +65,35 @@ async fn fetch_fresh_streaming() -> Result<Value, String> {
6565
6666 // Merge new items (avoid duplicates by URL)
6767 let initial_count = all_items. len ( ) ;
68- for mut item in new_items {
68+ for item in new_items {
6969 if let Some ( url) = item[ "url" ] . as_str ( ) {
7070 if !all_items. iter ( ) . any ( |existing| existing[ "url" ] . as_str ( ) == Some ( url) ) {
71- // Cache poster if available
72- if let ( Some ( s3) , Some ( poster_url) ) = ( s3_client. as_ref ( ) , item[ "poster_url" ] . as_str ( ) ) {
73- let mut hasher = DefaultHasher :: new ( ) ;
74- url. hash ( & mut hasher) ;
75- let url_hash = format ! ( "{:x}" , hasher. finish( ) ) ;
76-
77- match cache_poster ( s3, & bucket, & url_hash, poster_url, & client) . await {
78- Ok ( proxy_path) => {
79- item[ "poster_url" ] = json ! ( proxy_path) ;
71+ all_items. insert ( 0 , item) ;
72+ }
73+ }
74+ }
75+
76+ // Process all items to ensure posters are cached and proxied
77+ if let Some ( s3) = s3_client. as_ref ( ) {
78+ for item in all_items. iter_mut ( ) {
79+ if let Some ( poster_url) = item[ "poster_url" ] . as_str ( ) {
80+ // If not already proxied, try to cache it
81+ if !poster_url. starts_with ( "/api/streaming/poster/" ) {
82+ if let Some ( url) = item[ "url" ] . as_str ( ) {
83+ let mut hasher = DefaultHasher :: new ( ) ;
84+ url. hash ( & mut hasher) ;
85+ let url_hash = format ! ( "{:x}" , hasher. finish( ) ) ;
86+
87+ match cache_poster ( s3, & bucket, & url_hash, poster_url, & client) . await {
88+ Ok ( proxy_path) => {
89+ item[ "poster_url" ] = json ! ( proxy_path) ;
90+ // Also store/update individual JSON metadata with proxied URL
91+ let _ = store_individual_movie ( s3, & bucket, & url_hash, item) . await ;
92+ }
93+ Err ( e) => tracing:: warn!( "⚠️ Failed to cache poster for {}: {}" , url, e) ,
8094 }
81- Err ( e) => tracing:: warn!( "⚠️ Failed to cache poster for {}: {}" , url, e) ,
8295 }
83-
84- // Store individual JSON metadata
85- let _ = store_individual_movie ( s3, & bucket, & url_hash, & item) . await ;
8696 }
87-
88- all_items. insert ( 0 , item) ;
8997 }
9098 }
9199 }
@@ -138,8 +146,17 @@ async fn cache_poster(
138146 return Ok ( format ! ( "/api/streaming/poster/{}" , hash) ) ;
139147 }
140148
149+ // Ensure absolute URL
150+ let absolute_url = if poster_url. starts_with ( "/" ) && !poster_url. starts_with ( "//" ) {
151+ format ! ( "{}{}" , BASE_URL , poster_url)
152+ } else if poster_url. starts_with ( "//" ) {
153+ format ! ( "https:{}" , poster_url)
154+ } else {
155+ poster_url. to_string ( )
156+ } ;
157+
141158 // Download poster
142- let response = http_client. get ( poster_url ) . send ( ) . await . map_err ( |e| e. to_string ( ) ) ?;
159+ let response = http_client. get ( absolute_url ) . send ( ) . await . map_err ( |e| e. to_string ( ) ) ?;
143160 let bytes = response. bytes ( ) . await . map_err ( |e| e. to_string ( ) ) ?;
144161
145162 // Store in S3
@@ -237,7 +254,14 @@ fn parse_movie_article(content: &str) -> Option<Value> {
237254 content[ img_pos..] . find ( "src=\" " ) . and_then ( |src_pos| {
238255 let src_start = img_pos + src_pos + 5 ;
239256 content[ src_start..] . find ( "\" " ) . map ( |src_end| {
240- content[ src_start..src_start + src_end] . replace ( "&amp;" , "&" )
257+ let path = content[ src_start..src_start + src_end] . replace ( "&amp;" , "&" ) ;
258+ if path. starts_with ( "/" ) && !path. starts_with ( "//" ) {
259+ format ! ( "{}{}" , BASE_URL , path)
260+ } else if path. starts_with ( "//" ) {
261+ format ! ( "https:{}" , path)
262+ } else {
263+ path
264+ }
241265 } )
242266 } )
243267 } ) ;
0 commit comments