Commit 5bc4fd4

Backport PR ipython#14877: Fix LLM prefix including the new lines which are used as spaces
1 parent e62b356 commit 5bc4fd4
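
The bug named in the commit message can be reproduced directly with prompt_toolkit's Buffer: the newlines that llm_autosuggestion inserts below the cursor to make room for a multi-line suggestion become part of buffer.document.text, so they used to leak into the LLM prefix. A minimal sketch, assuming only that prompt_toolkit is installed (the typed text and padding count are illustrative):

from prompt_toolkit.buffer import Buffer

buf = Buffer()
buf.insert_text("def fib", move_cursor=True)

# Padding newlines inserted below the cursor (move_cursor=False keeps the cursor in place),
# mirroring what llm_autosuggestion does to make room for a multi-line suggestion.
for _ in range(3):
    buf.insert_text("\n", move_cursor=False)

print(repr(buf.document.text))                # 'def fib\n\n\n' -> old prefix, padding included
print(repr(buf.document.text_before_cursor))  # 'def fib'       -> new prefix, stops at the cursor
print(repr(buf.document.text_after_cursor))   # '\n\n\n'        -> now sent as the suffix instead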

File tree

3 files changed: +22 -14 lines changed


IPython/terminal/shortcuts/auto_suggest.py

Lines changed: 13 additions & 10 deletions

@@ -174,7 +174,7 @@ class NavigableAutoSuggestFromHistory(AutoSuggestFromHistory):
     # This is the instance of the LLM provider from jupyter-ai to which we forward the request
     # to generate inline completions.
     _llm_provider: Any | None
-    _llm_prefixer: callable = lambda self, x: "wrong"
+    _llm_prefixer: Callable = lambda self, x: "wrong"

     def __init__(self):
         super().__init__()
@@ -325,17 +325,14 @@ async def _trigger_llm(self, buffer) -> None:
         """
         # we likely want to store the current cursor position, and cancel if the cursor has moved.
         try:
-            import jupyter_ai_magics
             import jupyter_ai.completions.models as jai_models
         except ModuleNotFoundError:
             jai_models = None
         if not self._llm_provider:
             warnings.warn("No LLM provider found, cannot trigger LLM completions")
             return
         if jai_models is None:
-            warnings.warn(
-                "LLM Completion requires `jupyter_ai_magics` and `jupyter_ai` to be installed"
-            )
+            warnings.warn("LLM Completion requires `jupyter_ai` to be installed")

         self._cancel_running_llm_task()

@@ -365,7 +362,7 @@ async def _trigger_llm_core(self, buffer: Buffer):
         Unlike with JupyterAi, as we do not have multiple cell, the cell id
         is always set to `None`.

-        We set the prefix to the current cell content, but could also inset the
+        We set the prefix to the current cell content, but could also insert the
         rest of the history or even just the non-fail history.

         In the same way, we do not have cell id.
@@ -378,21 +375,27 @@ async def _trigger_llm_core(self, buffer: Buffer):
         providers.
         """
         try:
-            import jupyter_ai_magics
             import jupyter_ai.completions.models as jai_models
         except ModuleNotFoundError:
             jai_models = None

+        if not jai_models:
+            raise ValueError("jupyter-ai is not installed")
+
+        if not self._llm_provider:
+            raise ValueError("No LLM provider found, cannot trigger LLM completions")
+
         hm = buffer.history.shell.history_manager
         prefix = self._llm_prefixer(hm)
         get_ipython().log.debug("prefix: %s", prefix)

         self._request_number += 1
         request_number = self._request_number
+
         request = jai_models.InlineCompletionRequest(
             number=request_number,
-            prefix=prefix + buffer.document.text,
-            suffix="",
+            prefix=prefix + buffer.document.text_before_cursor,
+            suffix=buffer.document.text_after_cursor,
             mime="text/x-python",
             stream=True,
             path=None,
@@ -438,7 +441,7 @@ async def llm_autosuggestion(event: KeyPressEvent):
     doc = event.current_buffer.document
     lines_to_insert = max(0, _MIN_LINES - doc.line_count + doc.cursor_position_row)
     for _ in range(lines_to_insert):
-        event.current_buffer.insert_text("\n", move_cursor=False)
+        event.current_buffer.insert_text("\n", move_cursor=False, fire_event=False)

     await provider._trigger_llm(event.current_buffer)

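For reference, a hedged sketch of how the request fields are assembled after this change; history_prefix and the buffer contents below are hypothetical stand-ins for what self._llm_prefixer(hm) and the user's buffer would actually provide:

# Hypothetical values, for illustration only.
history_prefix = "n = 10\n"        # whatever self._llm_prefixer(hm) renders from the history manager
text_before_cursor = "def fib"     # what the user has typed up to the cursor
text_after_cursor = "\n\n\n"       # padding newlines inserted below the cursor

request_kwargs = dict(
    prefix=history_prefix + text_before_cursor,  # previously: history prefix + the whole buffer text
    suffix=text_after_cursor,                    # previously: ""
    mime="text/x-python",
    stream=True,
)
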
IPython/terminal/tests/fake_llm.py

Lines changed: 1 addition & 1 deletion

@@ -42,7 +42,7 @@ async def stream_inline_completions(self, request):

         assert request.number > 0
         token = f"t{request.number}s0"
-        last_line = request.prefix.rstrip("\n").splitlines()[-1]
+        last_line = request.prefix.splitlines()[-1]

         if not FIBONACCI.startswith(last_line):
             return

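The rstrip("\n") in the fake provider was a workaround for the padding newlines that used to reach the prefix; without it, splitlines() ended on an empty string and the startswith guard matched everything. A short illustration with made-up strings:

leaky_prefix = "def fib\n\n\n"   # old behaviour: padding newlines reached the provider
clean_prefix = "def fib"         # new behaviour: the prefix stops at the cursor

print(leaky_prefix.splitlines()[-1])               # ''        -> the guard degenerated without the workaround
print(leaky_prefix.rstrip("\n").splitlines()[-1])  # 'def fib' -> what the workaround recovered
print(clean_prefix.splitlines()[-1])               # 'def fib' -> no workaround needed any more
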
IPython/terminal/tests/test_shortcuts.py

Lines changed: 8 additions & 3 deletions

@@ -1,4 +1,5 @@
 import pytest
+from IPython.terminal.interactiveshell import PtkHistoryAdapter
 from IPython.terminal.shortcuts.auto_suggest import (
     accept,
     accept_or_jump_to_end,
@@ -46,7 +47,7 @@ def make_event(text, cursor, suggestion):
 try:
     from .fake_llm import FIBONACCI
 except ImportError:
-    FIBONACCI = None
+    FIBONACCI = ""


 @dec.skip_without("jupyter_ai")
@@ -56,9 +57,13 @@ async def test_llm_autosuggestion():
     ip = get_ipython()
     ip.auto_suggest = provider
     ip.llm_provider_class = "IPython.terminal.tests.fake_llm.FibonacciCompletionProvider"
+    ip.history_manager.get_range = Mock(return_value=[])
     text = "def fib"
-    event = make_event(text, len(text), "")
-    event.current_buffer.history.shell.history_manager.get_range = Mock(return_value=[])
+    event = Mock()
+    event.current_buffer = Buffer(
+        history=PtkHistoryAdapter(ip),
+    )
+    event.current_buffer.insert_text(text, move_cursor=True)
     await llm_autosuggestion(event)
     assert event.current_buffer.suggestion.text == FIBONACCI[len(text) :]

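The updated test drives llm_autosuggestion through a real prompt_toolkit Buffer instead of the make_event helper. A rough sketch of that pattern, substituting InMemoryHistory for PtkHistoryAdapter so it runs without a live IPython shell:

from unittest.mock import Mock

from prompt_toolkit.buffer import Buffer
from prompt_toolkit.history import InMemoryHistory

event = Mock()
event.current_buffer = Buffer(history=InMemoryHistory())
event.current_buffer.insert_text("def fib", move_cursor=True)

# The cursor now sits right after the typed text, so a request built from this buffer
# uses "def fib" as the local prefix and an empty suffix.
assert event.current_buffer.document.text_before_cursor == "def fib"
assert event.current_buffer.document.text_after_cursor == ""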