fix: correct model type extraction for O1 model handling in litellm_ai_handler.py

Author: mrT23
Date:   2024-10-19 11:32:45 +03:00
Parent: e82afdd2cb
Commit: b7437147af

@@ -188,7 +188,8 @@ class LiteLLMAIHandler(BaseAiHandler):
             # Currently O1 does not support separate system and user prompts
             O1_MODEL_PREFIX = 'o1-'
-            if model.startswith(O1_MODEL_PREFIX):
+            model_type = model.split('/')[-1] # 'azure/o1-' or 'o1-'
+            if model_type.startswith(O1_MODEL_PREFIX):
                 user = f"{system}\n\n\n{user}"
                 system = ""
                 get_logger().info(f"Using O1 model, combining system and user prompts")
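
For context, a minimal standalone sketch of why the change is needed (the model names below are illustrative): when the model string carries a provider prefix such as 'azure/', the old check model.startswith('o1-') never matches, while the new code first extracts the bare model name and tests that instead.

# Sketch of the prefix check before and after this commit (example model names).
O1_MODEL_PREFIX = 'o1-'

for model in ('o1-preview', 'azure/o1-preview', 'gpt-4o'):
    old_match = model.startswith(O1_MODEL_PREFIX)       # misses 'azure/o1-preview'
    model_type = model.split('/')[-1]                   # strip any provider prefix
    new_match = model_type.startswith(O1_MODEL_PREFIX)  # matches the Azure-prefixed O1 model too
    print(model, old_match, new_match)

# Prints:
# o1-preview True True
# azure/o1-preview False True
# gpt-4o False False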