From 82972d1cbde0980937134743162746d1e7bb82e4 Mon Sep 17 00:00:00 2001
From: F4ria
Date: Thu, 11 Apr 2024 17:02:39 +0800
Subject: [PATCH] support streaming response for
 claude_photo_handler/gemini_photo_handler

---
 handlers/claude.py | 12 +++++++++++-
 handlers/gemini.py | 25 ++++++++++++++++---------
 2 files changed, 27 insertions(+), 10 deletions(-)

diff --git a/handlers/claude.py b/handlers/claude.py
index 3dbe141..8ff60f5 100644
--- a/handlers/claude.py
+++ b/handlers/claude.py
@@ -191,8 +191,18 @@ def claude_photo_handler(message: Message, bot: TeleBot) -> None:
                 },
             ],
             model=ANTHROPIC_MODEL,
+            stream=True,
         )
-        bot_reply_markdown(reply_id, who, r.content[0].text, bot)
+        s = ""
+        start = time.time()
+        for e in r:
+            if e.type == "content_block_delta":
+                s += e.delta.text
+                if time.time() - start > 1.7:
+                    start = time.time()
+                    bot_reply_markdown(reply_id, who, s, bot, split_text=False)
+
+        bot_reply_markdown(reply_id, who, s, bot)
     except Exception as e:
         print(e)
         bot_reply_markdown(reply_id, who, "answer wrong", bot)
diff --git a/handlers/gemini.py b/handlers/gemini.py
index a3da981..de6882c 100644
--- a/handlers/gemini.py
+++ b/handlers/gemini.py
@@ -129,7 +129,6 @@ def gemini_pro_handler(message: Message, bot: TeleBot) -> None:
         start = time.time()
         for e in r:
             s += e.text
-            print(s)
             if time.time() - start > 1.7:
                 start = time.time()
                 bot_reply_markdown(reply_id, who, s, bot, split_text=False)
@@ -148,11 +147,10 @@ def gemini_pro_handler(message: Message, bot: TeleBot) -> None:
 
 def gemini_photo_handler(message: Message, bot: TeleBot) -> None:
     s = message.caption
-    reply_message = bot.reply_to(
-        message,
-        "Generating google gemini vision answer please wait.",
-    )
     prompt = s.strip()
+    who = "Gemini Vision"
+    # show something, make it more responsible
+    reply_id = bot_reply_first(message, who, bot)
     # get the high quaility picture.
     max_size_photo = max(message.photo, key=lambda p: p.file_size)
     file_path = bot.get_file(max_size_photo.file_id).file_path
@@ -167,10 +165,19 @@ def gemini_photo_handler(message: Message, bot: TeleBot) -> None:
         "parts": [{"mime_type": "image/jpeg", "data": image_data}, {"text": prompt}]
     }
     try:
-        response = model.generate_content(contents=contents)
-        bot.reply_to(message, "Gemini vision answer:\n" + response.text)
-    finally:
-        bot.delete_message(reply_message.chat.id, reply_message.message_id)
+        r = model.generate_content(contents=contents, stream=True)
+        s = ""
+        start = time.time()
+        for e in r:
+            s += e.text
+            if time.time() - start > 1.7:
+                start = time.time()
+                bot_reply_markdown(reply_id, who, s, bot, split_text=False)
+
+        bot_reply_markdown(reply_id, who, s, bot)
+    except Exception as e:
+        print(e)
+        bot_reply_markdown(reply_id, who, "answer wrong", bot)
 
 
 def register(bot: TeleBot) -> None:
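
Note: both hunks apply the same throttled "edit while streaming" pattern: accumulate text from the provider's streaming iterator and only edit the Telegram reply about every 1.7 seconds, then send one final edit with the full text. A minimal standalone sketch of that pattern is below; stream_chunks and edit_message are hypothetical stand-ins, while the real handlers iterate the Anthropic/Gemini responses and call this repo's bot_reply_markdown helper.

import time


def stream_chunks():
    # Hypothetical stand-in for a streaming API response: yields text pieces over time.
    for piece in ["Hello", ", ", "streaming", " ", "world", "!"]:
        time.sleep(0.5)
        yield piece


def edit_message(text: str) -> None:
    # Hypothetical stand-in for editing the Telegram reply in place
    # (bot_reply_markdown in this repo).
    print(f"edit -> {text!r}")


def reply_with_streaming() -> None:
    s = ""
    start = time.time()
    for chunk in stream_chunks():
        s += chunk
        # Throttle edits to roughly one per 1.7 s to stay under Telegram's
        # message-edit rate limits.
        if time.time() - start > 1.7:
            start = time.time()
            edit_message(s)
    # Final edit so the message always ends with the complete text.
    edit_message(s)


if __name__ == "__main__":
    reply_with_streaming()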