@@ -160,66 +160,116 @@ def __init__(
         self.upsert_guard()
 
     @property
+    @deprecated(
+        """'Guard.prompt_schema' is deprecated and will be removed in \
+versions 0.5.x and beyond."""
+    )
     def prompt_schema(self) -> Optional[StringSchema]:
         """Return the input schema."""
         return self.rail.prompt_schema
 
     @property
+    @deprecated(
+        """'Guard.instructions_schema' is deprecated and will be removed in \
+versions 0.5.x and beyond."""
+    )
     def instructions_schema(self) -> Optional[StringSchema]:
         """Return the input schema."""
         return self.rail.instructions_schema
 
     @property
+    @deprecated(
+        """'Guard.msg_history_schema' is deprecated and will be removed in \
+versions 0.5.x and beyond."""
+    )
     def msg_history_schema(self) -> Optional[StringSchema]:
         """Return the input schema."""
         return self.rail.msg_history_schema
 
     @property
+    @deprecated(
+        """'Guard.output_schema' is deprecated and will be removed in \
+versions 0.5.x and beyond."""
+    )
     def output_schema(self) -> Schema:
         """Return the output schema."""
         return self.rail.output_schema
 
     @property
+    @deprecated(
+        """'Guard.instructions' is deprecated and will be removed in \
+versions 0.5.x and beyond. Use 'Guard.history.last.instructions' instead."""
+    )
     def instructions(self) -> Optional[Instructions]:
         """Return the instruction-prompt."""
         return self.rail.instructions
 
     @property
+    @deprecated(
+        """'Guard.prompt' is deprecated and will be removed in \
+versions 0.5.x and beyond. Use 'Guard.history.last.prompt' instead."""
+    )
     def prompt(self) -> Optional[Prompt]:
         """Return the prompt."""
         return self.rail.prompt
 
     @property
+    @deprecated(
+        """'Guard.raw_prompt' is deprecated and will be removed in \
+versions 0.5.x and beyond. Use 'Guard.history.last.prompt' instead."""
+    )
     def raw_prompt(self) -> Optional[Prompt]:
         """Return the prompt, alias for `prompt`."""
-        return self.prompt
+        return self.rail.prompt
 
     @property
+    @deprecated(
+        """'Guard.base_prompt' is deprecated and will be removed in \
+versions 0.5.x and beyond. Use 'Guard.history.last.prompt' instead."""
+    )
     def base_prompt(self) -> Optional[str]:
         """Return the base prompt i.e. prompt.source."""
-        if self.prompt is None:
+        if self.rail.prompt is None:
             return None
-        return self.prompt.source
+        return self.rail.prompt.source
 
     @property
+    @deprecated(
+        """'Guard.reask_prompt' is deprecated and will be removed in \
+versions 0.5.x and beyond. Use 'Guard.history.last.reask_prompts' instead."""
+    )
     def reask_prompt(self) -> Optional[Prompt]:
         """Return the reask prompt."""
-        return self.output_schema.reask_prompt_template
+        return self.rail.output_schema.reask_prompt_template
 
     @reask_prompt.setter
+    @deprecated(
+        """'Guard.reask_prompt' is deprecated and will be removed in \
+versions 0.5.x and beyond. Pass 'reask_prompt' in the initializer \
+method instead: e.g. 'Guard.from_pydantic'."""
+    )
     def reask_prompt(self, reask_prompt: Optional[str]):
         """Set the reask prompt."""
-        self.output_schema.reask_prompt_template = reask_prompt
+        self.rail.output_schema.reask_prompt_template = reask_prompt
 
     @property
+    @deprecated(
+        """'Guard.reask_instructions' is deprecated and will be removed in \
+versions 0.5.x and beyond. Use 'Guard.history.last.reask_instructions' instead."""
+    )
     def reask_instructions(self) -> Optional[Instructions]:
         """Return the reask prompt."""
-        return self.output_schema.reask_instructions_template
+        return self.rail.output_schema.reask_instructions_template
 
     @reask_instructions.setter
+    @deprecated(
+        """'Guard.reask_instructions' is deprecated and will be removed in \
+versions 0.5.x and beyond. Pass 'reask_instructions' in the initializer \
+method instead: e.g. 'Guard.from_pydantic'."""
+    )
     def reask_instructions(self, reask_instructions: Optional[str]):
         """Set the reask prompt."""
-        self.output_schema.reask_instructions_template = reask_instructions
+        self.rail.output_schema.reask_instructions_template = reask_instructions
 
     def configure(
         self,
@@ -549,10 +599,14 @@ def __call(
549599 ("guard_id" , self ._guard_id ),
550600 ("user_id" , self ._user_id ),
551601 ("llm_api" , llm_api .__name__ if llm_api else "None" ),
552- ("custom_reask_prompt" , self .reask_prompt is not None ),
602+ (
603+ "custom_reask_prompt" ,
604+ self .rail .output_schema .reask_prompt_template is not None ,
605+ ),
553606 (
554607 "custom_reask_instructions" ,
555- self .reask_instructions is not None ,
608+ self .rail .output_schema .reask_instructions_template
609+ is not None ,
556610 ),
557611 ],
558612 is_parent = True , # It will have children
@@ -570,9 +624,11 @@ def __call(
570624 "This should never happen."
571625 )
572626
573- input_prompt = prompt or (self .prompt ._source if self .prompt else None )
627+ input_prompt = prompt or (
628+ self .rail .prompt ._source if self .rail .prompt else None
629+ )
574630 input_instructions = instructions or (
575- self .instructions ._source if self .instructions else None
631+ self .rail . instructions ._source if self . rail .instructions else None
576632 )
577633 call_inputs = CallInputs (
578634 llm_api = llm_api ,
@@ -663,8 +719,8 @@ def _call_sync(
         *args,
         **kwargs,
     ) -> Union[ValidationOutcome[OT], Iterable[ValidationOutcome[OT]]]:
-        instructions_obj = instructions or self.instructions
-        prompt_obj = prompt or self.prompt
+        instructions_obj = instructions or self.rail.instructions
+        prompt_obj = prompt or self.rail.prompt
         msg_history_obj = msg_history or []
         if prompt_obj is None:
             if msg_history is not None and not len(msg_history_obj):
@@ -681,10 +737,10 @@ def _call_sync(
                 prompt=prompt_obj,
                 msg_history=msg_history_obj,
                 api=get_llm_ask(llm_api, *args, **kwargs),
-                prompt_schema=self.prompt_schema,
-                instructions_schema=self.instructions_schema,
-                msg_history_schema=self.msg_history_schema,
-                output_schema=self.output_schema,
+                prompt_schema=self.rail.prompt_schema,
+                instructions_schema=self.rail.instructions_schema,
+                msg_history_schema=self.rail.msg_history_schema,
+                output_schema=self.rail.output_schema,
                 num_reasks=num_reasks,
                 metadata=metadata,
                 base_model=self.base_model,
@@ -699,10 +755,10 @@ def _call_sync(
                 prompt=prompt_obj,
                 msg_history=msg_history_obj,
                 api=get_llm_ask(llm_api, *args, **kwargs),
-                prompt_schema=self.prompt_schema,
-                instructions_schema=self.instructions_schema,
-                msg_history_schema=self.msg_history_schema,
-                output_schema=self.output_schema,
+                prompt_schema=self.rail.prompt_schema,
+                instructions_schema=self.rail.instructions_schema,
+                msg_history_schema=self.rail.msg_history_schema,
+                output_schema=self.rail.output_schema,
                 num_reasks=num_reasks,
                 metadata=metadata,
                 base_model=self.base_model,
@@ -744,8 +800,8 @@ async def _call_async(
         Returns:
             The raw text output from the LLM and the validated output.
         """
-        instructions_obj = instructions or self.instructions
-        prompt_obj = prompt or self.prompt
+        instructions_obj = instructions or self.rail.instructions
+        prompt_obj = prompt or self.rail.prompt
         msg_history_obj = msg_history or []
         if prompt_obj is None:
             if msg_history_obj is not None and not len(msg_history_obj):
@@ -759,10 +815,10 @@ async def _call_async(
             prompt=prompt_obj,
             msg_history=msg_history_obj,
             api=get_async_llm_ask(llm_api, *args, **kwargs),
-            prompt_schema=self.prompt_schema,
-            instructions_schema=self.instructions_schema,
-            msg_history_schema=self.msg_history_schema,
-            output_schema=self.output_schema,
+            prompt_schema=self.rail.prompt_schema,
+            instructions_schema=self.rail.instructions_schema,
+            msg_history_schema=self.rail.msg_history_schema,
+            output_schema=self.rail.output_schema,
             num_reasks=num_reasks,
             metadata=metadata,
             base_model=self.base_model,
@@ -887,10 +943,14 @@ def __parse(
                     ("guard_id", self._guard_id),
                     ("user_id", self._user_id),
                     ("llm_api", llm_api.__name__ if llm_api else "None"),
-                    ("custom_reask_prompt", self.reask_prompt is not None),
+                    (
+                        "custom_reask_prompt",
+                        self.rail.output_schema.reask_prompt_template is not None,
+                    ),
                     (
                         "custom_reask_instructions",
-                        self.reask_instructions is not None,
+                        self.rail.output_schema.reask_instructions_template
+                        is not None,
                     ),
                 ],
                 is_parent=True,  # It will have children
@@ -912,9 +972,9 @@ def __parse(
         set_tracer(self._tracer)
         set_tracer_context(self._tracer_context)
 
-        input_prompt = self.prompt._source if self.prompt else None
+        input_prompt = self.rail.prompt._source if self.rail.prompt else None
         input_instructions = (
-            self.instructions._source if self.instructions else None
+            self.rail.instructions._source if self.rail.instructions else None
         )
         call_inputs = CallInputs(
             llm_api=llm_api,
@@ -1013,10 +1073,10 @@ def _sync_parse(
             prompt=kwargs.pop("prompt", None),
             msg_history=kwargs.pop("msg_history", None),
             api=get_llm_ask(llm_api, *args, **kwargs) if llm_api else None,
-            prompt_schema=self.prompt_schema,
-            instructions_schema=self.instructions_schema,
-            msg_history_schema=self.msg_history_schema,
-            output_schema=self.output_schema,
+            prompt_schema=self.rail.prompt_schema,
+            instructions_schema=self.rail.instructions_schema,
+            msg_history_schema=self.rail.msg_history_schema,
+            output_schema=self.rail.output_schema,
             num_reasks=num_reasks,
             metadata=metadata,
             output=llm_output,
@@ -1055,10 +1115,10 @@ async def _async_parse(
             prompt=kwargs.pop("prompt", None),
             msg_history=kwargs.pop("msg_history", None),
             api=get_async_llm_ask(llm_api, *args, **kwargs) if llm_api else None,
-            prompt_schema=self.prompt_schema,
-            instructions_schema=self.instructions_schema,
-            msg_history_schema=self.msg_history_schema,
-            output_schema=self.output_schema,
+            prompt_schema=self.rail.prompt_schema,
+            instructions_schema=self.rail.instructions_schema,
+            msg_history_schema=self.rail.msg_history_schema,
+            output_schema=self.rail.output_schema,
             num_reasks=num_reasks,
             metadata=metadata,
             output=llm_output,
@@ -1267,11 +1327,17 @@ def validate(self, llm_output: str, *args, **kwargs) -> ValidationOutcome[str]:
         ):
             self.rail = Rail.from_string_validators(
                 validators=self._validators,
-                prompt=self.prompt.source if self.prompt else None,
-                instructions=self.instructions.source if self.instructions else None,
-                reask_prompt=self.reask_prompt.source if self.reask_prompt else None,
-                reask_instructions=self.reask_instructions.source
-                if self.reask_instructions
+                prompt=self.rail.prompt.source if self.rail.prompt else None,
+                instructions=(
+                    self.rail.instructions.source if self.rail.instructions else None
+                ),
+                reask_prompt=(
+                    self.rail.output_schema.reask_prompt_template.source
+                    if self.rail.output_schema.reask_prompt_template
+                    else None
+                ),
+                reask_instructions=self.rail.output_schema.reask_instructions_template.source
+                if self.rail.output_schema.reask_instructions_template
                 else None,
             )
 
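For reference, a minimal usage sketch (not part of the commit) of the migration these deprecation messages point to, assuming the guardrails API referenced in this diff; `llm_api` and the reask template below are illustrative placeholders:

    from pydantic import BaseModel

    from guardrails import Guard


    class Summary(BaseModel):
        text: str


    # Per the setter deprecations above, reask_prompt / reask_instructions are
    # passed to an initializer such as Guard.from_pydantic instead of being
    # assigned through the deprecated property setters.
    guard = Guard.from_pydantic(
        output_class=Summary,
        prompt="Summarize the provided document.",
        reask_prompt="Correct the previous response so it matches the schema.",
    )

    guard(llm_api)  # llm_api: any LLM callable supported by guardrails, defined elsewhere

    # Replaces the deprecated guard.prompt / guard.instructions properties,
    # as the deprecation messages above suggest.
    print(guard.history.last.prompt)
    print(guard.history.last.instructions)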