Skip to content

Commit 9268b9f

Browse files
authored
Update simulator-interaction-data.md
1 parent 0130ade commit 9268b9f

File tree

1 file changed

+4
-4
lines changed

1 file changed

+4
-4
lines changed

articles/ai-foundry/how-to/develop/simulator-interaction-data.md

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -184,7 +184,7 @@ The `query_response_generating_prompty_override` parameter allows you to customi
184184

185185
```python
186186
current_dir = os.path.dirname(__file__)
187-
query_response_prompty_override = os.path.join(current_dir, "query_generator_long_answer.prompty") # Passes the `query_response_generating_prompty` parameter with the path to the custom prompt template.
187+
query_response_prompty_override = os.path.join(current_dir, "query_generator_long_answer.prompty") # Passes the query_response_generating_prompty parameter with the path to the custom prompt template.
188188

189189
tasks = [
190190
f"I am a student and I want to learn more about {wiki_search_term}",
@@ -222,7 +222,7 @@ outputs = await simulator(
222222
text=text,
223223
num_queries=1, # Minimal number of queries.
224224
user_simulator_prompty="user_simulating_application.prompty", # A prompty that accepts all the following kwargs can be passed to override the default user behavior.
225-
user_simulator_prompty_kwargs=user_simulator_prompty_kwargs # It uses a dictionary to override default model parameters such as `temperature` and `top_p`.
225+
user_simulator_prompty_kwargs=user_simulator_prompty_kwargs # It uses a dictionary to override default model parameters such as temperature and top_p.
226226
)
227227
```
228228

@@ -278,7 +278,7 @@ for item in data:
278278

279279
outputs = asyncio.run(grounding_simulator(
280280
target=callback,
281-
conversation_turns=conversation_turns, #generates 287 rows of data
281+
conversation_turns=conversation_turns, # This generates 287 rows of data.
282282
max_conversation_turns=1,
283283
))
284284

@@ -339,7 +339,7 @@ async def callback(
339339
if 'file_content' in messages["template_parameters"]:
340340
query += messages["template_parameters"]['file_content']
341341

342-
# Call your own endpoint and pass your query as input. Make sure to handle the error responses of `function_call_to_your_endpoint`.
342+
# Call your own endpoint and pass your query as input. Make sure to handle the error responses of function_call_to_your_endpoint.
343343
response = await function_call_to_your_endpoint(query)
344344

345345
# Format responses in OpenAI message protocol:

0 commit comments

Comments (0)