Skip to content

Commit 42e968d

Browse files
authored
Update cache.py (#435)
1 parent 3b78e31 commit 42e968d

File tree

1 file changed

+51
-51
lines changed

1 file changed

+51
-51
lines changed

samples/cache.py

Lines changed: 51 additions & 51 deletions
Original file line numberDiff line numberDiff line change
@@ -39,6 +39,57 @@ def test_cache_create(self):
3939
# [END cache_create]
4040
cache.delete()
4141

42+
def test_cache_create_from_name(self):
    """Show that a cache can be re-fetched later using only its name."""
    # [START cache_create_from_name]
    transcript = genai.upload_file(path=media / "a11.txt")
    model_name = "gemini-1.5-flash-001"
    cache = genai.caching.CachedContent.create(
        model=model_name,
        system_instruction="You are an expert analyzing transcripts.",
        contents=[transcript],
    )
    # Only the name needs to be persisted; the cache object itself does not.
    cache_name = cache.name

    # Later, possibly in a different process:
    cache = genai.caching.CachedContent.get(cache_name)
    apollo_model = genai.GenerativeModel.from_cached_content(cache)
    response = apollo_model.generate_content("Find a lighthearted moment from this transcript")
    print(response.text)
    # [END cache_create_from_name]
    cache.delete()
60+
61+
def test_cache_create_from_chat(self):
    """Cache the history of an in-progress chat, then resume the chat from the cache."""
    # [START cache_create_from_chat]
    model_name = "gemini-1.5-flash-001"
    system_instruction = "You are an expert analyzing transcripts."

    model = genai.GenerativeModel(model_name=model_name, system_instruction=system_instruction)
    chat = model.start_chat()
    transcript = genai.upload_file(path=media / "a11.txt")
    response = chat.send_message(["Hi, could you summarize this transcript?", transcript])
    print("\n\nmodel: ", response.text)
    response = chat.send_message(["Okay, could you tell me more about the trans-lunar injection"])
    print("\n\nmodel: ", response.text)

    # To cache the conversation so far, pass the chat history as the list of "contents".
    cache = genai.caching.CachedContent.create(
        model=model_name,
        system_instruction=system_instruction,
        contents=chat.history,
    )
    model = genai.GenerativeModel.from_cached_content(cached_content=cache)

    # Continue the chat where you left off.
    chat = model.start_chat()
    response = chat.send_message(
        "I didn't understand that last part, could you explain it in simpler language?"
    )
    print("\n\nmodel: ", response.text)
    # [END cache_create_from_chat]
    cache.delete()
92+
4293
def test_cache_delete(self):
4394
# [START cache_delete]
4495
document = genai.upload_file(path=media / "a11.txt")
@@ -100,57 +151,6 @@ def test_cache_update(self):
100151
# [END cache_update]
101152
cache.delete()
102153

103-
def test_cache_create_from_name(self):
104-
# [START cache_create_from_name]
105-
document = genai.upload_file(path=media / "a11.txt")
106-
model_name = "gemini-1.5-flash-001"
107-
cache = genai.caching.CachedContent.create(
108-
model=model_name,
109-
system_instruction="You are an expert analyzing transcripts.",
110-
contents=[document],
111-
)
112-
cache_name = cache.name # Save the name for later
113-
114-
# Later
115-
cache = genai.caching.CachedContent.get(cache_name)
116-
apollo_model = genai.GenerativeModel.from_cached_content(cache)
117-
response = apollo_model.generate_content("Find a lighthearted moment from this transcript")
118-
print(response.text)
119-
# [END cache_create_from_name]
120-
cache.delete()
121-
122-
def test_cache_chat(self):
123-
# [START cache_chat]
124-
model_name = "gemini-1.5-flash-001"
125-
system_instruction = "You are an expert analyzing transcripts."
126-
127-
model = genai.GenerativeModel(model_name=model_name, system_instruction=system_instruction)
128-
chat = model.start_chat()
129-
document = genai.upload_file(path=media / "a11.txt")
130-
response = chat.send_message(["Hi, could you summarize this transcript?", document])
131-
print("\n\nmodel: ", response.text)
132-
response = chat.send_message(
133-
["Okay, could you tell me more about the trans-lunar injection"]
134-
)
135-
print("\n\nmodel: ", response.text)
136-
137-
# To cache the conversation so far, pass the chat history as the list of "contents".
138-
cache = genai.caching.CachedContent.create(
139-
model=model_name,
140-
system_instruction=system_instruction,
141-
contents=chat.history,
142-
)
143-
model = genai.GenerativeModel.from_cached_content(cached_content=cache)
144-
145-
# Continue the chat where you left off.
146-
chat = model.start_chat()
147-
response = chat.send_message(
148-
"I didn't understand that last part, could you explain it in simpler language?"
149-
)
150-
print("\n\nmodel: ", response.text)
151-
# [END cache_chat]
152-
cache.delete()
153-
154154

155155
if __name__ == "__main__":
156156
absltest.main()

0 commit comments

Comments
 (0)