From 7194e8f482d47f4318c0fcfc322245fe33a12fff Mon Sep 17 00:00:00 2001
From: debu-sinha
Date: Mon, 2 Mar 2026 00:36:17 -0500
Subject: [PATCH] Add future annotations to prompt_helper for Python 3.9 compat

ChatPromptHelper uses PEP 604 union syntax (LLM | None) which raises
TypeError at runtime on Python 3.9. Adding the future annotations import
makes all type hints lazy strings, fixing the 3.9 test suite.
---
 llama-index-core/llama_index/core/indices/prompt_helper.py | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/llama-index-core/llama_index/core/indices/prompt_helper.py b/llama-index-core/llama_index/core/indices/prompt_helper.py
index 2b5313fa11c..01504c24166 100644
--- a/llama-index-core/llama_index/core/indices/prompt_helper.py
+++ b/llama-index-core/llama_index/core/indices/prompt_helper.py
@@ -9,6 +9,8 @@
 needed), or truncating them so that they fit in a single LLM call.
 """
 
+from __future__ import annotations
+
 import logging
 from copy import deepcopy
 from typing import TYPE_CHECKING, Callable, List, Optional, Sequence, cast