# mirror of https://github.com/cdryzun/tg_bot_collections.git
# synced 2025-04-29 00:27:09 +08:00
# 129 lines, 4.2 KiB, Python
from os import environ
|
|
import re
|
|
|
|
import google.generativeai as genai
|
|
from google.generativeai.types.generation_types import StopCandidateException
|
|
from telebot import TeleBot
|
|
from telebot.types import Message
|
|
|
|
from . import bot_reply_markdown
|
|
|
|
# API key read from the environment; may be None if unset (configure will
# then fail on first use).
GOOGLE_GEMINI_KEY = environ.get("GOOGLE_GEMINI_KEY")

# Configure the Gemini client once at import time.
genai.configure(api_key=GOOGLE_GEMINI_KEY)

# Sampling parameters shared by every Gemini conversation.
generation_config = {
    "temperature": 0.7,
    "top_p": 1,
    "top_k": 1,
    "max_output_tokens": 4096,
}

# Disable all content filtering so the bot relays model answers unmodified.
safety_settings = [
    {"category": "HARM_CATEGORY_HARASSMENT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_HATE_SPEECH", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_SEXUALLY_EXPLICIT", "threshold": "BLOCK_NONE"},
    {"category": "HARM_CATEGORY_DANGEROUS_CONTENT", "threshold": "BLOCK_NONE"},
]

# Global history cache: str(telegram user id) -> Gemini chat session.
# NOTE(review): in-memory only, so all conversation histories are lost on
# process restart.
gemini_player_dict = {}
|
|
|
|
|
|
def make_new_gemini_convo():
    """Create and return a fresh Gemini chat session.

    Uses the module-level ``generation_config`` and ``safety_settings`` so
    every per-user conversation behaves identically.
    """
    return genai.GenerativeModel(
        model_name="gemini-pro",
        generation_config=generation_config,
        safety_settings=safety_settings,
    ).start_chat()
|
|
|
|
|
|
def gemini_handler(message: Message, bot: TeleBot) -> None:
    """Gemini : /gemini <question>

    Keeps one chat session per Telegram user in ``gemini_player_dict``;
    ``/gemini clear`` (or an empty question) wipes that user's history.
    """
    # Strip the trigger prefix ("/gemini[@bot]" or "gemini:") so only the
    # actual question reaches the model. Without this, "clear" could never
    # match and the command text itself was sent to Gemini.
    q = re.sub(
        r"^(?:/gemini(?:@\w+)?|gemini:)\s*", "", message.text.strip()
    ).strip()

    # Reuse the per-user conversation, creating it on first contact.
    # restart will lose all histories TODO
    user_key = str(message.from_user.id)
    player = gemini_player_dict.get(user_key)
    if player is None:
        player = make_new_gemini_convo()
        gemini_player_dict[user_key] = player

    if q == "clear" or len(q) == 0:
        bot.reply_to(
            message,
            "just clear your gemini messages history",
        )
        player.history.clear()
        return

    # show something, make it more responsive
    reply_id = bot.reply_to(
        message,
        "**Gemini** is __thinking__...",
        parse_mode="MarkdownV2",
    )

    # Keep at most the last 5 exchanges; each exchange is an ask + answer
    # pair, so 10 history entries.
    if len(player.history) > 10:
        player.history = player.history[2:]

    try:
        player.send_message(q)
        gemini_reply_text = player.last.text.strip()
        # Gemini often uses ':' in **Title**, which breaks Telegram Markdown;
        # escape it.
        gemini_reply_text = gemini_reply_text.replace(":**", "\\:**")
    except StopCandidateException as e:
        # The SDK sometimes aborts while the partial answer is embedded in
        # the exception text; try to salvage it.
        match = re.search(r'content\s*{\s*parts\s*{\s*text:\s*"([^"]+)"', str(e))
        if match:
            gemini_reply_text = match.group(1)
            gemini_reply_text = re.sub(r"\\n", "\n", gemini_reply_text)
        else:
            print("No meaningful text was extracted from the exception.")
            bot.reply_to(
                message,
                "Google gemini encountered an error while generating an answer. Please check the log.",
            )
            return

    # By default markdown
    bot_reply_markdown(reply_id, "Gemini", gemini_reply_text, bot)
|
|
|
|
|
|
def gemini_photo_handler(message: Message, bot: TeleBot) -> None:
    """Answer a photo message whose caption starts with "gemini:" or "/gemini".

    Downloads the highest-resolution copy of the photo and sends it together
    with the caption text to the Gemini vision model.
    """
    # Placeholder so the user sees the bot is working; always deleted below.
    reply_message = bot.reply_to(
        message,
        "Generating google gemini vision answer please wait.",
    )

    # The registration filter guarantees a caption with the trigger prefix;
    # strip it so only the real question goes into the prompt.
    prompt = re.sub(
        r"^(?:/gemini(?:@\w+)?|gemini:)\s*", "", message.caption.strip()
    ).strip()

    # Telegram supplies several resolutions; get the high quality picture.
    max_size_photo = max(message.photo, key=lambda p: p.file_size)
    file_path = bot.get_file(max_size_photo.file_id).file_path
    # The bytes are already in memory — no need for the previous shared
    # temp-file round trip (which also raced between concurrent users).
    image_data = bot.download_file(file_path)

    model = genai.GenerativeModel("gemini-pro-vision")
    contents = {
        "parts": [{"mime_type": "image/jpeg", "data": image_data}, {"text": prompt}]
    }
    try:
        response = model.generate_content(contents=contents)
        bot.reply_to(message, "Gemini vision answer:\n" + response.text)
    finally:
        # Remove the placeholder even if generation failed.
        bot.delete_message(reply_message.chat.id, reply_message.message_id)
|
|
|
|
|
|
def register(bot: TeleBot) -> None:
    """Attach the Gemini text and vision handlers to *bot*.

    Text questions trigger on the "/gemini" command or a "gemini:" prefix;
    photos trigger when their caption carries either prefix.
    """
    for trigger in ({"commands": ["gemini"]}, {"regexp": "^gemini:"}):
        bot.register_message_handler(gemini_handler, pass_bot=True, **trigger)
    bot.register_message_handler(
        gemini_photo_handler,
        content_types=["photo"],
        func=lambda m: m.caption and m.caption.startswith(("gemini:", "/gemini")),
        pass_bot=True,
    )
|