Mirror of https://github.com/cdryzun/tg_bot_collections.git
Support streaming responses for claude_photo_handler / gemini_photo_handler
@@ -191,8 +191,18 @@ def claude_photo_handler(message: Message, bot: TeleBot) -> None:
                     },
                 ],
                 model=ANTHROPIC_MODEL,
+                stream=True,
             )
-            bot_reply_markdown(reply_id, who, r.content[0].text, bot)
+            s = ""
+            start = time.time()
+            for e in r:
+                if e.type == "content_block_delta":
+                    s += e.delta.text
+                if time.time() - start > 1.7:
+                    start = time.time()
+                    bot_reply_markdown(reply_id, who, s, bot, split_text=False)
+
+            bot_reply_markdown(reply_id, who, s, bot)
     except Exception as e:
         print(e)
         bot_reply_markdown(reply_id, who, "answer wrong", bot)
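For reference, a standalone sketch of the streaming pattern this hunk adopts, written against the Anthropic Python SDK outside the bot. The client setup, model name, and prompt below are illustrative assumptions; only the event handling (content_block_delta events carrying delta.text) and the 1.7 s throttle mirror the handler above.

# Standalone sketch (assumptions: anthropic SDK installed, ANTHROPIC_API_KEY set,
# placeholder model name and prompt).
import os
import time
import anthropic

client = anthropic.Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])

r = client.messages.create(
    model="claude-3-haiku-20240307",  # placeholder model
    max_tokens=1024,
    messages=[{"role": "user", "content": "Describe this photo caption."}],
    stream=True,
)

s = ""
start = time.time()
for e in r:
    # Only content_block_delta events carry text deltas; other event types
    # (message_start, content_block_start, ...) are skipped.
    if e.type == "content_block_delta":
        s += e.delta.text
    # Emit intermediate output at most every 1.7 s, the same throttle the
    # handler uses to stay under Telegram's message-edit rate limits.
    if time.time() - start > 1.7:
        start = time.time()
        print(s)

print(s)  # final, complete text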
@@ -129,7 +129,6 @@ def gemini_pro_handler(message: Message, bot: TeleBot) -> None:
         start = time.time()
         for e in r:
             s += e.text
-            print(s)
             if time.time() - start > 1.7:
                 start = time.time()
                 bot_reply_markdown(reply_id, who, s, bot, split_text=False)
@@ -148,11 +147,10 @@ def gemini_pro_handler(message: Message, bot: TeleBot) -> None:
 
 def gemini_photo_handler(message: Message, bot: TeleBot) -> None:
     s = message.caption
-    reply_message = bot.reply_to(
-        message,
-        "Generating google gemini vision answer please wait.",
-    )
     prompt = s.strip()
+    who = "Gemini Vision"
+    # show something, make it more responsible
+    reply_id = bot_reply_first(message, who, bot)
     # get the high quaility picture.
     max_size_photo = max(message.photo, key=lambda p: p.file_size)
     file_path = bot.get_file(max_size_photo.file_id).file_path
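The helpers bot_reply_first and bot_reply_markdown used throughout these hunks are defined elsewhere in tg_bot_collections and are not part of this diff. A hypothetical minimal version, purely to illustrate the send-placeholder-then-edit flow (the real implementations may differ, for example in how split_text is handled):

# Hypothetical sketch of the repo helpers referenced above; not the actual code.
from telebot import TeleBot
from telebot.types import Message

def bot_reply_first(message: Message, who: str, bot: TeleBot) -> Message:
    # Send a visible placeholder right away so the user sees progress,
    # and return it so later calls can edit it in place.
    return bot.reply_to(message, f"*{who}* is thinking...", parse_mode="Markdown")

def bot_reply_markdown(
    reply_id: Message, who: str, text: str, bot: TeleBot, split_text: bool = True
) -> None:
    # Edit the placeholder with the partial or final answer.
    # split_text is accepted only for signature compatibility here; the real
    # helper presumably splits overlong messages, this sketch ignores it.
    bot.edit_message_text(
        f"*{who}*:\n{text}",
        chat_id=reply_id.chat.id,
        message_id=reply_id.message_id,
        parse_mode="Markdown",
    )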
@@ -167,10 +165,19 @@ def gemini_photo_handler(message: Message, bot: TeleBot) -> None:
         "parts": [{"mime_type": "image/jpeg", "data": image_data}, {"text": prompt}]
     }
     try:
-        response = model.generate_content(contents=contents)
-        bot.reply_to(message, "Gemini vision answer:\n" + response.text)
-    finally:
-        bot.delete_message(reply_message.chat.id, reply_message.message_id)
+        r = model.generate_content(contents=contents, stream=True)
+        s = ""
+        start = time.time()
+        for e in r:
+            s += e.text
+            if time.time() - start > 1.7:
+                start = time.time()
+                bot_reply_markdown(reply_id, who, s, bot, split_text=False)
+
+        bot_reply_markdown(reply_id, who, s, bot)
+    except Exception as e:
+        print(e)
+        bot_reply_markdown(reply_id, who, "answer wrong", bot)
 
 
 def register(bot: TeleBot) -> None:
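Similarly, a standalone sketch of the streaming call shape this hunk moves to, using google-generativeai outside the bot. The API key handling, model name, and local image file are assumptions; the contents dict, stream=True, and per-chunk .text access mirror the handler.

# Standalone sketch (assumptions: google-generativeai installed, GOOGLE_API_KEY set,
# placeholder model name, and a local test.jpg standing in for the Telegram photo).
import os
import time
import google.generativeai as genai

genai.configure(api_key=os.environ["GOOGLE_API_KEY"])
model = genai.GenerativeModel("gemini-pro-vision")  # placeholder model

with open("test.jpg", "rb") as f:
    image_data = f.read()

contents = {
    "parts": [{"mime_type": "image/jpeg", "data": image_data}, {"text": "Describe this image."}]
}

r = model.generate_content(contents=contents, stream=True)
s = ""
start = time.time()
for e in r:
    s += e.text  # each streamed chunk exposes its new text via .text
    if time.time() - start > 1.7:  # throttle intermediate updates, as the handler does
        start = time.time()
        print(s)

print(s)  # final, complete answer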