
Commit 53e5e24

[MINOR] Doc fix
1 parent: 670011a


2 files changed: 6 additions, 8 deletions

adalflow/adalflow/core/prompt_builder.py

Lines changed: 3 additions & 3 deletions
@@ -1,4 +1,4 @@
-"""Class prompt builder for LightRAG system prompt."""
+"""Class prompt builder for AdalFlow system prompt."""
 
 from typing import Dict, Any, Optional, List, TypeVar
 import logging
@@ -20,10 +20,10 @@
 class Prompt(Component):
     __doc__ = r"""Renders a text string(prompt) from a Jinja2 template string.
 
-    In default, we use the :ref:`DEFAULT_LIGHTRAG_SYSTEM_PROMPT<core-default_prompt_template>` as the template.
+    In default, we use the :ref:`DEFAULT_ADALFLOW_SYSTEM_PROMPT<core-default_prompt_template>` as the template.
 
     Args:
-        template (str, optional): The Jinja2 template string. Defaults to DEFAULT_LIGHTRAG_SYSTEM_PROMPT.
+        template (str, optional): The Jinja2 template string. Defaults to DEFAULT_ADALFLOW_SYSTEM_PROMPT.
         preset_prompt_kwargs (Optional[Dict], optional): The preset prompt kwargs to fill in the variables in the prompt. Defaults to {}.
 
     Examples:
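
For context on the renamed constant, here is a minimal sketch of how the documented ``Prompt`` arguments fit together. The import path and constructor arguments follow the file path and docstring in the diff above; the render call and the example template are assumptions, not part of this commit.

    from adalflow.core.prompt_builder import Prompt

    # `template` is a Jinja2 string; `preset_prompt_kwargs` pre-fills some of its variables.
    prompt = Prompt(
        template="You are {{ role }}. Answer the question: {{ question }}",
        preset_prompt_kwargs={"role": "a helpful assistant"},
    )

    # Assumed call style: calling the component fills in the remaining template variables.
    print(prompt(question="What is AdalFlow?"))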

docs/source/tutorials/tool_helper.rst

Lines changed: 3 additions & 5 deletions
@@ -510,8 +510,8 @@ We will use :class:`components.output_parsers.outputs.JsonOutputParser` to strea
 
     from adalflow.components.output_parsers import JsonOutputParser
 
-    func_parser = JsonOutputParser(data_class=Function)
-    instructions = func_parser.format_instructions(exclude=["thought", "args"])
+    func_parser = JsonOutputParser(data_class=Function, exclude_fields=["thought", "args"])
+    instructions = func_parser.format_instructions()
     print(instructions)
 
 The output is:
@@ -542,9 +542,7 @@ Now, let's prepare our generator with the above prompt, ``Function`` data class,
     model_kwargs = {"model": "gpt-3.5-turbo"}
     prompt_kwargs = {
         "tools": tool_manager.yaml_definitions,
-        "output_format_str": func_parser.format_instructions(
-            exclude=["thought", "args"]
-        ),
+        "output_format_str": func_parser.format_instructions(),
     }
     generator = Generator(
         model_client=ModelClientType.OPENAI(),
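
As a quick reference for the updated usage shown above, here is a minimal sketch, assuming ``adalflow`` is installed at this commit: fields to omit are now passed once via ``exclude_fields`` at construction, so ``format_instructions()`` no longer takes an ``exclude`` argument. The ``Function`` import path is an assumption; the parser calls themselves come from the diff.

    from adalflow.components.output_parsers import JsonOutputParser
    from adalflow.core.types import Function  # assumed location of the Function data class

    # Exclusions are set once on the parser...
    func_parser = JsonOutputParser(data_class=Function, exclude_fields=["thought", "args"])

    # ...so the instruction string is generated without extra arguments.
    instructions = func_parser.format_instructions()
    print(instructions)

    # The same string is then passed to the generator as "output_format_str"
    # in prompt_kwargs (see the second hunk above).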
