From 4a892f391c785e955c364b6c6e4d3102497ee575 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E5=85=AC=E5=8F=B8git?= <240241002@qq.com> Date: Fri, 27 Feb 2026 10:36:08 +0800 Subject: [PATCH] =?UTF-8?q?feat(api):=20=E6=B7=BB=E5=8A=A0=E6=A8=A1?= =?UTF-8?q?=E5=9E=8B=E7=B1=BB=E5=9E=8B=E5=88=A4=E6=96=AD=E9=80=BB=E8=BE=91?= =?UTF-8?q?=E4=BB=A5=E6=8E=92=E9=99=A4=E5=9B=BE=E5=83=8F=E6=A8=A1=E5=9E=8B?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit - 提取model_value.lower()到变量model_lower中以避免重复计算 - 在判断聊天模型时增加对'image'关键词的检查,排除图像模型 - 修复了原有逻辑中可能将图像模型误判为聊天模型的问题 --- blueprints/api.py | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/blueprints/api.py b/blueprints/api.py index 1c2e399..e121013 100644 --- a/blueprints/api.py +++ b/blueprints/api.py @@ -72,7 +72,8 @@ def generate(): model_value = data.get('model') prompt = data.get('prompt') - is_chat_model = "gemini" in model_value.lower() or "gpt" in model_value.lower() + model_lower = model_value.lower() + is_chat_model = ("gemini" in model_lower or "gpt" in model_lower) and "image" not in model_lower # 3. 处理聊天模型 (同步) if is_chat_model: