@@ -618,6 +618,7 @@ bool fetch(JSContext *cx, unsigned argc, Value *vp) {
     return false;
   }
   if (!should_use_guest_caching_out) {
+    fastly::push_debug_message("Using traditional fetch without cache API");
     return fetch_send_body<false>(cx, request, args.rval());
   }
 
@@ -640,6 +641,7 @@ bool fetch(JSContext *cx, unsigned argc, Value *vp) {
 
   // If not cacheable, fallback to non-caching path
   if (!is_cacheable) {
+    fastly::push_debug_message("Request not cacheable, using non-caching fetch");
     return fetch_send_body<true>(cx, request, args.rval());
   }
 
@@ -664,6 +666,7 @@ bool fetch(JSContext *cx, unsigned argc, Value *vp) {
   auto transaction_res =
       host_api::HttpCacheEntry::transaction_lookup(request_handle, override_key_span);
   if (auto *err = transaction_res.to_err()) {
+    fastly::push_debug_message("Transaction lookup error");
     if (host_api::error_is_limit_exceeded(*err)) {
       JS_ReportErrorASCII(cx, "HTTP caching limit exceeded");
     } else {
@@ -680,6 +683,7 @@ bool fetch(JSContext *cx, unsigned argc, Value *vp) {
 
   auto state_res = cache_entry.get_state();
   if (auto *err = state_res.to_err()) {
+    fastly::push_debug_message("Cache state error");
     HANDLE_ERROR(cx, *err);
     JSObject *promise = PromiseRejectedWithPendingError(cx);
     if (!promise) {
@@ -689,10 +693,12 @@ bool fetch(JSContext *cx, unsigned argc, Value *vp) {
     return true;
   }
   auto cache_state = state_res.unwrap();
+  fastly::push_debug_message(std::to_string(cache_state.state));
 
   // Check for usable cached response
   auto found_res = cache_entry.get_found_response(true);
   if (auto *err = found_res.to_err()) {
+    fastly::push_debug_message("Usable cache response error");
     HANDLE_ERROR(cx, *err);
     JSObject *promise = PromiseRejectedWithPendingError(cx);
     if (!promise) {
@@ -704,6 +710,7 @@ bool fetch(JSContext *cx, unsigned argc, Value *vp) {
 
   auto maybe_response = found_res.unwrap();
   if (maybe_response.has_value()) {
+    fastly::push_debug_message("Have usable response");
     auto cached_response = maybe_response.value();
 
     if (cache_state.must_insert_or_update()) {
@@ -751,12 +758,15 @@ bool fetch(JSContext *cx, unsigned argc, Value *vp) {
 
   // No valid cached response, need to make backend request
   if (!cache_state.must_insert_or_update()) {
+    fastly::push_debug_message("No usable response, and don't need to insert or update -> pass");
     // transaction entry is done
     cache_entry.close();
     // request collapsing has been disabled: pass the original request to the origin without
     // updating the cache and without caching
     return fetch_send_body<true>(cx, request, args.rval());
   } else {
+    fastly::push_debug_message(
+        "No usable response, and must insert or update, running origin fetch hooks");
     JS::RootedValue stream_back_promise(cx);
     if (!fetch_send_body_with_cache_hooks(cx, request, cache_entry, &stream_back_promise)) {
       cache_entry.close();
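One of the added trace lines logs only the raw numeric cache lookup state (the std::to_string(cache_state.state) call above), which is hard to interpret in the debug output. The sketch below shows one way such a message could be made more readable. It is not part of this commit: it assumes only the state member and the must_insert_or_update() accessor that appear in the diff, and the helper name describe_cache_state is hypothetical.

// Hypothetical helper (not part of this commit): format the cache lookup
// state logged above into a human-readable debug string. It relies only on
// the members of the state object that are visible in this diff.
#include <string>

template <typename CacheState>
std::string describe_cache_state(const CacheState &cache_state) {
  std::string msg = "cache lookup state=" + std::to_string(cache_state.state);
  // must_insert_or_update() is the same accessor fetch() uses in the hunks
  // above to choose between the pass-through path and the origin fetch with
  // cache hooks.
  msg += cache_state.must_insert_or_update() ? " (must insert or update)"
                                             : " (no insert or update needed)";
  return msg;
}

// Possible use at the lookup site, replacing the raw std::to_string() call:
//   fastly::push_debug_message(describe_cache_state(cache_state));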