Skip to content

Commit a335505

Browse files
authored
Removing leading/trailing newlines from Context creation prompt (#1013)
1 parent f1f7d5b commit a335505

File tree

1 file changed

+10 −4 lines changed

paperqa/core.py

Lines changed: 10 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -163,9 +163,13 @@ async def map_fxn_summary(
163163
success = False
164164

165165
if summary_llm_model and prompt_templates:
166-
data = {"question": question, "citation": citation, "text": text.text} | (
167-
extra_prompt_data or {}
168-
)
166+
data = {
167+
"question": question,
168+
"citation": citation,
169+
# Strip newlines in case chunking led to blank lines,
170+
# but not spaces, to preserve text alignment
171+
"text": text.text.strip("\n"),
172+
} | (extra_prompt_data or {})
169173
message_prompt, system_prompt = prompt_templates
170174
messages = [
171175
Message(role="system", content=system_prompt.format(**data)),
@@ -193,7 +197,9 @@ async def map_fxn_summary(
193197
except KeyError:
194198
success = False
195199
else:
196-
context = text.text
200+
# Strip newlines in case chunking led to blank lines,
201+
# but not spaces, to preserve text alignment
202+
context = text.text.strip("\n")
197203
# If we don't assign scores, just default to 5.
198204
# why 5? Because we filter out 0s in another place
199205
# and 5/10 is the other default I could come up with

0 commit comments

Comments (0)