@@ -4,7 +4,7 @@ module DiscourseAi
   module Personas
     module Tools
       class Researcher < Tool
-        attr_reader :last_filter, :result_count
+        attr_reader :filter, :result_count, :goals, :dry_run
 
         class << self
           def signature
@@ -51,25 +51,41 @@ def accepted_options
         end
 
         def invoke(&blk)
-          @last_filter = parameters[:filter] || ""
+          @filter = parameters[:filter] || ""
+          @goals = parameters[:goals] || ""
+          @dry_run = parameters[:dry_run].nil? ? false : parameters[:dry_run]
+
           post = Post.find_by(id: context.post_id)
           goals = parameters[:goals] || ""
           dry_run = parameters[:dry_run].nil? ? false : parameters[:dry_run]
 
           return { error: "No goals provided" } if goals.blank?
-          return { error: "No filter provided" } if @last_filter.blank?
-
-          filter = DiscourseAi::Utils::Research::Filter.new(@last_filter)
+          return { error: "No filter provided" } if @filter.blank?
 
+          filter = DiscourseAi::Utils::Research::Filter.new(@filter)
           @result_count = filter.search.count
 
+          blk.call details
+
           if dry_run
-            { dry_run: true, goals: goals, filter: @last_filter, number_of_results: @result_count }
+            { dry_run: true, goals: goals, filter: @filter, number_of_results: @result_count }
           else
             process_filter(filter, goals, post, &blk)
           end
         end
 
+        def details
+          if @dry_run
+            I18n.t("discourse_ai.ai_bot.tool_description.researcher_dry_run", description_args)
+          else
+            I18n.t("discourse_ai.ai_bot.tool_description.researcher", description_args)
+          end
+        end
+
+        def description_args
+          { count: @result_count || 0, filter: @filter || "", goals: @goals || "" }
+        end
+
         protected
 
         MIN_TOKENS_FOR_RESEARCH = 8000
@@ -88,7 +104,7 @@ def process_filter(filter, goals, post, &blk)
           results = []
 
           formatter.each_chunk { |chunk| results << run_inference(chunk[:text], goals, post, &blk) }
-          { dry_run: false, goals: goals, filter: @last_filter, results: results }
+          { dry_run: false, goals: goals, filter: @filter, results: results }
         end
 
         def run_inference(chunk_text, goals, post, &blk)
@@ -111,7 +127,9 @@ def run_inference(chunk_text, goals, post, &blk)
             cancel_manager: context.cancel_manager,
           ) { |partial| results << partial }
 
-          blk.call(".")
+          @progress_dots ||= 0
+          @progress_dots += 1
+          blk.call(details + "\n\n#{"." * @progress_dots}")
           results.join
         end
 
@@ -135,37 +153,6 @@ def goal_user_prompt(goals, chunk_text)
             Your goal is: #{goals}
           TEXT
         end
-
-        def description_args
-          { count: @result_count || 0, filter: @last_filter || "" }
-        end
-
-        private
-
-        def simulate_count(filter_components)
-          # In a real implementation, this would query the database to get a count
-          # For now, return a simulated count
-          rand(10..100)
-        end
-
-        def perform_research(filter_components, goals, max_results)
-          # This would perform the actual research based on the filter and goal
-          # For now, return a simplified result structure
-          format_results([], %w[content url author date])
-        end
-
-        def calculate_max_results(llm)
-          max_results = options[:max_results].to_i
-          return [max_results, 100].min if max_results > 0
-
-          if llm.max_prompt_tokens > 30_000
-            50
-          elsif llm.max_prompt_tokens > 10_000
-            30
-          else
-            15
-          end
-        end
       end
     end
   end