File tree Expand file tree Collapse file tree 1 file changed +39
-0
lines changed Expand file tree Collapse file tree 1 file changed +39
-0
lines changed Original file line number Diff line number Diff line change 1+ from mellea .backends .huggingface import LocalHFBackend
2+ from mellea .backends .model_ids import IBM_GRANITE_3_3_8B
3+ from mellea .backends .types import ModelOption
4+ from mellea .stdlib .base import CBlock , LinearContext
5+ from mellea .stdlib .chat import Message
6+
# Demo: multi-turn generation over a shared LinearContext with KV caching.
# Two stable "fact" blocks are inserted with cache=True (presumably marking
# them for KV-activation reuse across turns — confirm against mellea docs),
# plus one uncached fact, then two questions are answered against that context.

ctx = LinearContext(window_size=100)
ctx.insert(
    CBlock(
        "Nathan Fulton is a Senior Research Scientist at the MIT-IBM Watson AI Lab, a joint venture between MIT and IBM.",
        cache=True,
    )
)
ctx.insert(
    CBlock(
        "The MIT-IBM Watson AI Lab is located at 314 Main St, Cambridge, Massachusetts.",
        cache=True,
    )
)
ctx.insert(CBlock("The ZIP code for 314 Main St, Cambridge, Massachusetts is 02142"))

backend = LocalHFBackend(model_id=IBM_GRANITE_3_3_8B)


def _ask(question: str) -> None:
    """Run one user turn against the shared context and print the result.

    The reply is printed wrapped in leading/trailing dots so that any
    stray whitespace in the generated text is visible.
    """
    msg = Message(role="user", content=question)
    # NOTE(review): _generate_from_context_with_kv_cache is a private
    # (underscore-prefixed) API — confirm there is no public entry point
    # before depending on it outside a demo.
    result = backend._generate_from_context_with_kv_cache(
        action=msg, ctx=ctx, model_options={ModelOption.MAX_NEW_TOKENS: 1000}
    )
    print(f".{result}.")


_ask("What is the likely ZIP code of Nathan Fulton's work address.")
_ask(
    "We know that Nathan does not work for a university. What is the likely name of Nathan's employer?"
)
You can’t perform that action at this time.
0 commit comments