2 files changed, 55 insertions(+), 0 deletions(-)

#!/usr/bin/env python
2+ """Example script demonstrating an interactive LLM chatbot."""
3+
4+ import readline # Enables input line editing
5+
6+ import lmstudio as lm
7+
8+ model = lm .llm ()
9+ chat = lm .Chat ("You are a task focused AI assistant" )
10+
11+ while True :
12+ try :
13+ user_input = input ("You (leave blank to exit): " )
14+ except EOFError :
15+ print ()
16+ break
17+ if not user_input :
18+ break
19+ chat .add_user_message (user_input )
20+ prediction_stream = model .respond_stream (
21+ chat ,
22+ on_message = chat .append ,
23+ )
24+ print ("Bot: " , end = "" , flush = True )
25+ for fragment in prediction_stream :
26+ print (fragment .content , end = "" , flush = True )
27+ print ()
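
For comparison, the same chat loop can be written without streaming by blocking on the full reply before printing it. The sketch below is not part of this change; it assumes model.respond() accepts the same Chat object and on_message callback as respond_stream(), and that printing the returned prediction prints its text.

#!/usr/bin/env python
"""Non-streaming variant of the chatbot loop (illustrative sketch)."""

import lmstudio as lm

model = lm.llm()
chat = lm.Chat("You are a task focused AI assistant")

while True:
    try:
        user_input = input("You (leave blank to exit): ")
    except EOFError:
        print()
        break
    if not user_input:
        break
    chat.add_user_message(user_input)
    # Assumption: respond() takes the same on_message callback as respond_stream()
    result = model.respond(chat, on_message=chat.append)
    print("Bot:", result)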

#!/usr/bin/env python
"""Example script demonstrating a simulated terminal command processor."""

import readline  # Enables input line editing

import lmstudio as lm

model = lm.llm()
console_history = []

while True:
    try:
        user_command = input("$ ")
    except EOFError:
        print()
        break
    if user_command.strip() == "exit":
        break
    console_history.append(f"$ {user_command}")
    history_prompt = "\n".join(console_history)
    prediction_stream = model.complete_stream(
        history_prompt,
        # Stop before the model starts writing the next "$" prompt line
        config={"stopStrings": ["$"]},
    )
    for fragment in prediction_stream:
        print(fragment.content, end="", flush=True)
    print()
    # Record the model's output so later completions see the full transcript
    console_history.append(prediction_stream.result().content)
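
The stop string is what keeps this looking like a console session: the model continues the accumulated transcript and halts as soon as it tries to emit the next "$" prompt line. As a minimal single-shot illustration (not part of this change, and assuming model.complete() accepts the same config mapping as complete_stream() and returns a result with a .content attribute, as the streamed result above does):

#!/usr/bin/env python
"""Single-shot console completion (illustrative sketch)."""

import lmstudio as lm

model = lm.llm()

# A transcript containing one command; the model is asked to continue it
# with that command's output, stopping before it invents another prompt line.
history_prompt = "$ uname -s\n"
result = model.complete(
    history_prompt,
    config={"stopStrings": ["$"]},
)
print(result.content)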