Skip to content

Commit 15a1a3f

Browse files
committed
Plugins: Switch AI agent from OpenAI to Gemini
Keys are easy to get at https://aistudio.google.com/app/api-keys
1 parent ab4053f commit 15a1a3f

File tree

7 files changed

+137
-55
lines changed

7 files changed

+137
-55
lines changed
Lines changed: 21 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -1,6 +1,6 @@
1-
# This plugin will act as an interactive ChatGPT. It has the option to save the last few messages,
2-
# but note that each message save will incur a higher token usage when communicating with OpenAI
3-
# as each message is included in the query to OpenAI. Also note that messages sent by other
1+
# This plugin will act as an interactive Gemini. It has the option to save the last few messages,
2+
# but note that each message save will incur a higher token usage when communicating with Gemini
3+
# as each message is included in the query to Gemini. Also note that messages sent by other
44
# plugins are currently not being saved.
55
#
66
# You can specify a "history_timeout" if you want the plugin to only remember messages within a
@@ -9,12 +9,12 @@
99
#
1010
# Sample setting:
1111
#
12-
# "shrimpgpt": {
13-
# "key": "your-openai-app-key",
14-
# "trigger": "chatgpt:", # Only respond to direct questions
15-
# "channel": "#chatgpt", # Only monitor one specific channel
12+
# "shrimpgemini": {
13+
# "key": "your-gemini-app-key",
14+
# "trigger": "gemini:", # Only respond to direct questions
15+
# "channel": "#gemini", # Only monitor one specific channel
1616
# "prompt": "You are an IRC bot. Users will post questions that you answer.",
17-
# "model": "gpt-4o-mini",
17+
# "model": "gemini-2.5-flash",
1818
# "max_tokens": 256,
1919
# "temperature": 0.2,
2020
# "max_history": 5, # Let the plugin "remember" the past 5 messages (including responses)
@@ -26,23 +26,23 @@
2626
# If "channel" is not specified, the plugin will respond in every channel.
2727
#
2828
# Commands:
29-
# * !gpt history reset
29+
# * !gemini history reset
3030
# Will manually trigger a reset of any previous messages saved.
3131

3232
import logging
3333
import sys
3434
import json
3535
import plugin
3636

37-
from utils import openai
37+
from utils import gemini
3838

3939
DEFAULT_PROMPT = """You are an IRC bot. Users will post questions that you answer."""
40-
DEFAULT_MODEL = "gpt-4o-mini"
40+
DEFAULT_MODEL = "gemini-2.5-flash"
4141

4242

43-
class shrimpgpt(plugin.Plugin):
43+
class shrimpgemini(plugin.Plugin):
4444
def __init__(self):
45-
plugin.Plugin.__init__(self, "shrimpgpt")
45+
plugin.Plugin.__init__(self, "shrimpgemini")
4646

4747
def started(self, settings):
4848
s = json.loads(settings)
@@ -52,15 +52,15 @@ def started(self, settings):
5252
prompt = s["prompt"] if "prompt" in s else DEFAULT_PROMPT
5353
self.system_message = {"role": "system", "content": prompt}
5454
self.model = s["model"] if "model" in s else DEFAULT_MODEL
55-
self.max_tokens = int(s["max_tokens"]) if "max_tokens" in s else 256
55+
self.max_tokens = int(s["max_tokens"]) if "max_tokens" in s else 512
5656
self.temperature = int(s["temperature"]) if "temperature" in s else 0.2
5757
self.history = []
5858
self.max_history = int(s["max_history"]) if "max_history" in s else 5
5959
# Max number of seconds of inactivity before cleaning out the message history
6060
self.history_timeout = int(s["history_timeout"]) if "history_timeout" in s else 120
6161
self.update_count = 0
6262
logging.info(
63-
"ShrimpGPT: Trigger: %s, channel: %s, model: %s, max_tokens: %i, temp: %i",
63+
"ShrimpGemini: Trigger: %s, channel: %s, model: %s, max_tokens: %i, temp: %i",
6464
self.trigger,
6565
self.channel,
6666
self.model,
@@ -74,12 +74,12 @@ def update(self):
7474
self.update_count += 1
7575
# Reset history if update_count exceeds "update_count"
7676
if self.update_count >= self.history_timeout:
77-
logging.info("ShrimpGPT: Resetting history: []")
77+
logging.info("ShrimpGemini: Resetting history: []")
7878
self.history = []
7979
self.update_count = 0
8080

8181
def reset_history(self, server, channel):
82-
logging.info("ShrimpGPT: Manual history reset")
82+
logging.info("ShrimpGemini: Manual history reset")
8383
self.history = []
8484
self.safe_privmsg(server, channel, "Reset!")
8585

@@ -96,17 +96,17 @@ def respond_to_message(self, query, server, channel):
9696
message = {"role": "user", "content": query}
9797
self.add_to_history(message)
9898
messages = [self.system_message] + self.history
99-
result = openai.get_response(
99+
result = gemini.get_response(
100100
self.key, messages, self.model, self.max_tokens, self.temperature
101101
)
102-
self.add_to_history({"role": "assistant", "content": result})
102+
self.add_to_history({"role": "model", "content": result})
103103
self.safe_privmsg(server, channel, result)
104104

105105
def on_pubmsg(self, server, user, channel, message):
106106
if self.channel and channel != self.channel:
107107
# Message is not in the specified channel
108108
return
109-
if message.startswith("!gpt history reset"):
109+
if message.startswith("!gemini history reset"):
110110
self._thread(self.reset_history, server, channel)
111111
return
112112
username = user.split("!", 1)[0]
@@ -122,4 +122,4 @@ def on_pubmsg(self, server, user, channel, message):
122122

123123

124124
if __name__ == "__main__":
125-
sys.exit(shrimpgpt.run())
125+
sys.exit(shrimpgemini.run())

plugins/youtubesummarizer/youtubesummarizer.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -1,36 +1,36 @@
1-
# Plugin that summarizes given youtube videos using OpenAI.
1+
# Plugin that summarizes given youtube videos using Gemini.
22
#
33
# Sample setting:
44
#
55
# "youtubesummarizer": {
6-
# "key": "your-openai-app-key",
6+
# "key": "your-gemini-app-key",
77
# "yt-key": "your-youtube-api-key",
88
# "channel": "#youtube",
9-
# "model": "gpt-4o-mini",
9+
# "model": "gemini-2.5-flash",
1010
# "max_tokens": 4096,
1111
# "temperature": 0.2
1212
# }
1313
#
1414
# The only required parameter is "key".
1515
# If "yt-key" is not specified, you could get a bit worse summary as some information
16-
# will not be passed on to openai.
16+
# will not be passed on to gemini.
1717
#
1818
# Commands:
19-
# * !gpt yt-summary https://www.youtube.com/watch?v=jNQXAC9IVRw
19+
# * !gemini yt-summary https://www.youtube.com/watch?v=jNQXAC9IVRw
2020

2121
import logging
2222
import sys
2323
import json
2424
import plugin
2525

2626
from youtube_transcript_api import YouTubeTranscriptApi
27-
from utils import youtube, openai
27+
from utils import youtube, gemini
2828

2929
YT_PROMPT = """
3030
Summarize the content of this YouTube video. End by giving a highlight link to the
3131
most important part of the video in the form of https://youtu.be/[id]?t=[timestamp]
3232
"""
33-
DEFAULT_MODEL = "gpt-4o-mini"
33+
DEFAULT_MODEL = "gemini-2.5-flash"
3434

3535

3636
class youtubesummarizer(plugin.Plugin):
@@ -68,7 +68,7 @@ def process_youtube(self, id, server, channel):
6868
{"role": "system", "content": YT_PROMPT},
6969
{"role": "user", "content": trans_str},
7070
]
71-
result = openai.get_response(
71+
result = gemini.get_response(
7272
self.key, messages, self.model, self.max_tokens, self.temperature
7373
)
7474
logging.info(result)
@@ -77,7 +77,7 @@ def process_youtube(self, id, server, channel):
7777
def on_pubmsg(self, server, user, channel, message):
7878
if self.channel and channel != self.channel:
7979
return
80-
if message.startswith("!gpt yt-summary"):
80+
if message.startswith("!gemini yt-summary"):
8181
for id in youtube.YouTube.find_all_ids(message):
8282
self._thread(self.process_youtube, id, server, channel)
8383

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -31,9 +31,9 @@ requests = "^2.32.3"
3131
feedparser = "^6.0.11"
3232
irc = "^20.5.0"
3333
pyzmq = "^26.2.0"
34-
openai = "^1.72.0"
3534
youtube-transcript-api = "^1.0.3"
3635
httpx = {version = "^0.28.1", extras = ["http2"]}
36+
google-genai = "^1.65.0"
3737

3838
[tool.poetry.dev-dependencies]
3939
requests-mock = "^1.12.1"

utils/gemini.py

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
import traceback
2+
import logging
3+
from google import genai
4+
from google.genai import types
5+
6+
7+
def get_response(key, messages, model, max_tokens, temperature):
    """Send a chat-style message list to Gemini and return the reply text.

    Args:
        key: Gemini API key.
        messages: list of ``{"role": ..., "content": ...}`` dicts. A "system"
            role becomes the system instruction; "user" maps to a user turn;
            "assistant" or "model" maps to a model turn. Other roles are
            silently dropped.
        model: Gemini model name, e.g. "gemini-2.5-flash".
        max_tokens: cap on the number of output tokens.
        temperature: sampling temperature.

    Returns:
        The response text, or the fallback string "No result ...." when the
        request fails or produces no text, so callers always get a str.
    """
    client = genai.Client(api_key=key)

    system_instruction = None
    contents = []

    # Translate OpenAI-style role/content dicts into Gemini Content objects.
    for msg in messages:
        role = msg["role"]
        content = msg["content"]
        if role == "system":
            # Gemini takes the system prompt via config, not as a chat turn.
            system_instruction = content
        elif role == "user":
            contents.append(
                types.Content(role="user", parts=[types.Part.from_text(text=content)])
            )
        elif role in ("assistant", "model"):
            # Accept both the OpenAI ("assistant") and Gemini ("model") names.
            contents.append(
                types.Content(role="model", parts=[types.Part.from_text(text=content)])
            )

    result = "No result ...."
    try:
        response = client.models.generate_content(
            model=model,
            contents=contents,
            config=types.GenerateContentConfig(
                system_instruction=system_instruction,
                max_output_tokens=max_tokens,
                temperature=temperature,
            ),
        )
        # response.text may be None/empty (e.g. blocked or truncated output);
        # keep the fallback string in that case.
        if response.text:
            result = response.text
    except Exception:
        # Best-effort: never crash the caller on API errors. logging.exception
        # records the full traceback (replaces manual traceback.format_exc()).
        logging.exception("Gemini request failed")

    # Lazy %-formatting: the string is only built if INFO logging is enabled.
    logging.info("Result: %s", result)
    return result

utils/openai.py

Lines changed: 0 additions & 24 deletions
This file was deleted.

utils/test/test_gemini.py

Lines changed: 65 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,65 @@
1+
import unittest
2+
from unittest.mock import MagicMock, patch
3+
from utils import gemini
4+
from google.genai import types
5+
6+
7+
class TestGemini(unittest.TestCase):
    """Tests for utils.gemini.get_response with the Gemini client mocked out."""

    @patch("google.genai.Client")
    def test_get_response(self, client_cls):
        # Stub the client class so no network request is ever made.
        fake_client = MagicMock()
        client_cls.return_value = fake_client
        fake_response = MagicMock()
        fake_response.text = "Hello, I am Gemini!"
        fake_client.models.generate_content.return_value = fake_response

        conversation = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "Hi there!"},
        ]

        answer = gemini.get_response("fake-key", conversation, "gemini-1.5-flash", 256, 0.2)

        # The client must be constructed with the supplied key and invoked once.
        client_cls.assert_called_once_with(api_key="fake-key")
        fake_client.models.generate_content.assert_called_once()
        _, kwargs = fake_client.models.generate_content.call_args
        self.assertEqual(kwargs["model"], "gemini-1.5-flash")
        config = kwargs["config"]
        self.assertEqual(config.system_instruction, "You are a helpful assistant.")
        self.assertEqual(config.max_output_tokens, 256)
        self.assertEqual(config.temperature, 0.2)
        self.assertEqual(answer, "Hello, I am Gemini!")

    @patch("google.genai.Client")
    def test_get_response_with_history(self, client_cls):
        fake_client = MagicMock()
        client_cls.return_value = fake_client
        fake_response = MagicMock()
        fake_response.text = "I remember you!"
        fake_client.models.generate_content.return_value = fake_response

        conversation = [
            {"role": "system", "content": "You are a helpful assistant."},
            {"role": "user", "content": "My name is Alice."},
            {"role": "assistant", "content": "Hello Alice!"},
            {"role": "user", "content": "What is my name?"},
        ]

        answer = gemini.get_response("fake-key", conversation, "gemini-1.5-flash", 256, 0.2)

        fake_client.models.generate_content.assert_called_once()
        _, kwargs = fake_client.models.generate_content.call_args

        # The system message is lifted out of the turn list; the remaining
        # turns keep their order, with "assistant" translated to Gemini's
        # "model" role.
        turns = kwargs["contents"]
        expected = [
            ("user", "My name is Alice."),
            ("model", "Hello Alice!"),
            ("user", "What is my name?"),
        ]
        self.assertEqual(len(turns), len(expected))
        for turn, (role, text) in zip(turns, expected):
            self.assertEqual(turn.role, role)
            self.assertEqual(turn.parts[0].text, text)

        self.assertEqual(answer, "I remember you!")
62+
63+
64+
# Allow running this test module directly (e.g. python utils/test/test_gemini.py).
if __name__ == "__main__":
    unittest.main()

0 commit comments

Comments
 (0)