diff --git a/pr_agent/algo/__init__.py b/pr_agent/algo/__init__.py index 20e463c3..cf09f4ad 100644 --- a/pr_agent/algo/__init__.py +++ b/pr_agent/algo/__init__.py @@ -24,6 +24,8 @@ MAX_TOKENS = { 'o1-mini-2024-09-12': 128000, # 128K, but may be limited by config.max_model_tokens 'o1-preview': 128000, # 128K, but may be limited by config.max_model_tokens 'o1-preview-2024-09-12': 128000, # 128K, but may be limited by config.max_model_tokens + 'o1-2024-12-17': 204800, # 200K, but may be limited by config.max_model_tokens + 'o1': 204800, # 200K, but may be limited by config.max_model_tokens 'claude-instant-1': 100000, 'claude-2': 100000, 'command-nightly': 4096, diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index c53c1ec3..c8a752f8 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -193,8 +193,8 @@ class LiteLLMAIHandler(BaseAiHandler): messages[1]["content"] = [{"type": "text", "text": messages[1]["content"]}, {"type": "image_url", "image_url": {"url": img_path}}] - # Currently O1 does not support separate system and user prompts - O1_MODEL_PREFIX = 'o1-' + # Currently, the OpenAI o1 model series does not support separate system and user prompts + O1_MODEL_PREFIX = 'o1' model_type = model.split('/')[-1] if '/' in model else model if model_type.startswith(O1_MODEL_PREFIX): user = f"{system}\n\n\n{user}"