@@ -1,11 +1,10 @@
-import copy
-from typing import Callable, List, Optional, Tuple, TypeVar
+from typing import List, Optional, Tuple, TypeVar
 
 from dbally.audit.event_tracker import EventTracker
-from dbally.iql_generator.iql_prompt_template import IQLPromptTemplate, default_iql_template
+from dbally.iql_generator.iql_prompt_template import IQLPromptTemplate, default_iql_template  # noqa
 from dbally.llms.base import LLM
 from dbally.llms.clients.base import LLMOptions
-from dbally.views.exposed_functions import ExposedFunction
+from dbally.prompts.formatters import IQLInputFormatter
 
 
 class IQLGenerator:
@@ -24,26 +23,16 @@ class IQLGenerator:
 
     TException = TypeVar("TException", bound=Exception)
 
-    def __init__(
-        self,
-        llm: LLM,
-        prompt_template: Optional[IQLPromptTemplate] = None,
-        promptify_view: Optional[Callable] = None,
-    ) -> None:
+    def __init__(self, llm: LLM) -> None:
         """
         Args:
             llm: LLM used to generate IQL
-            prompt_template: If not provided by the users is set to `default_iql_template`
-            promptify_view: Function formatting filters for prompt
         """
         self._llm = llm
-        self._prompt_template = prompt_template or copy.deepcopy(default_iql_template)
-        self._promptify_view = promptify_view or _promptify_filters
 
     async def generate_iql(
        self,
-        filters: List[ExposedFunction],
-        question: str,
+        input_formatter: IQLInputFormatter,
         event_tracker: EventTracker,
         conversation: Optional[IQLPromptTemplate] = None,
         llm_options: Optional[LLMOptions] = None,
@@ -52,30 +41,25 @@ async def generate_iql(
         Uses LLM to generate IQL in text form
 
         Args:
-            question: user question
-            filters: list of filters exposed by the view
+            input_formatter: formatter used to prepare prompt arguments dictionary
             event_tracker: event store used to audit the generation process
             conversation: conversation to be continued
             llm_options: options to use for the LLM client
 
         Returns:
             IQL - iql generated based on the user question
         """
-        filters_for_prompt = self._promptify_view(filters)
-
-        template = conversation or self._prompt_template
+        conversation, fmt = input_formatter(conversation or default_iql_template)
 
         llm_response = await self._llm.generate_text(
-            template=template,
-            fmt={"filters": filters_for_prompt, "question": question},
+            template=conversation,
+            fmt=fmt,
             event_tracker=event_tracker,
             options=llm_options,
         )
 
-        iql_filters = self._prompt_template.llm_response_parser(llm_response)
-
-        if conversation is None:
-            conversation = self._prompt_template
+        iql_filters = conversation.llm_response_parser(llm_response)
 
         conversation = conversation.add_assistant_message(content=llm_response)
 
@@ -98,19 +82,3 @@ def add_error_msg(self, conversation: IQLPromptTemplate, errors: List[TException
             msg += str(error) + "\n"
 
         return conversation.add_user_message(content=msg)
-
-
-def _promptify_filters(
-    filters: List[ExposedFunction],
-) -> str:
-    """
-    Formats filters for prompt
-
-    Args:
-        filters: list of filters exposed by the view
-
-    Returns:
-        filters_for_prompt: filters formatted for prompt
-    """
-    filters_for_prompt = "\n".join([str(filter) for filter in filters])
-    return filters_for_prompt
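
A minimal sketch of the call pattern after this refactor, for context. Only IQLGenerator, generate_iql, IQLInputFormatter, and default_iql_template appear in the diff; the formatter's constructor arguments, the view's list_filters() helper, and the (iql, conversation) tuple return are assumptions inferred from the Tuple import and the formatter call site, not guaranteed by this change.

# Sketch only, not the canonical API; assumed names are marked inline.
from dbally.iql_generator.iql_generator import IQLGenerator
from dbally.prompts.formatters import IQLInputFormatter

async def generate(llm, view, event_tracker):
    # Prompt handling moved out of IQLGenerator: no prompt_template or
    # promptify_view arguments anymore, just the LLM.
    generator = IQLGenerator(llm=llm)

    # Assumption: the formatter is constructed from the user question and the
    # view's exposed filters (its signature is not part of this diff).
    formatter = IQLInputFormatter(
        question="Which candidates have five years of experience?",
        filters=view.list_filters(),  # assumed helper returning the view's filters
    )

    # Inside generate_iql the formatter is invoked as
    #   conversation, fmt = input_formatter(conversation or default_iql_template)
    # so it yields both the prompt template and the fmt dict for generate_text.
    iql, conversation = await generator.generate_iql(  # tuple return assumed
        input_formatter=formatter,
        event_tracker=event_tracker,
    )
    return iql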