Add reasoning_effort argument to chat completion request

Trung Dinh
2025-02-21 22:16:18 +07:00
parent e81b0dca30
commit 9abb212e83


@@ -6,9 +6,9 @@ import requests
 from litellm import acompletion
 from tenacity import retry, retry_if_exception_type, stop_after_attempt
-from pr_agent.algo import NO_SUPPORT_TEMPERATURE_MODELS, USER_MESSAGE_ONLY_MODELS
+from pr_agent.algo import NO_SUPPORT_TEMPERATURE_MODELS, SUPPORT_REASONING_EFFORT_MODELS, USER_MESSAGE_ONLY_MODELS
 from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler
-from pr_agent.algo.utils import get_version
+from pr_agent.algo.utils import ReasoningEffort, get_version
 from pr_agent.config_loader import get_settings
 from pr_agent.log import get_logger
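
The two new imports are consumed later in the diff: ReasoningEffort as an enum of effort levels and SUPPORT_REASONING_EFFORT_MODELS as the list of models that accept the argument. A minimal sketch of what those definitions might look like, assuming the enum values and the model list (neither is shown in this diff):

    from enum import Enum

    class ReasoningEffort(str, Enum):
        # Assumed effort levels; inferred from the ReasoningEffort.HIGH/MEDIUM/LOW.value usage below.
        HIGH = "high"
        MEDIUM = "medium"
        LOW = "low"

    # Hypothetical list of models that accept a reasoning_effort argument.
    SUPPORT_REASONING_EFFORT_MODELS = [
        "o1",
        "o3-mini",
    ]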
@@ -101,6 +101,9 @@ class LiteLLMAIHandler(BaseAiHandler):
         # Model that doesn't support temperature argument
         self.no_support_temperature_models = NO_SUPPORT_TEMPERATURE_MODELS
+        # Models that support reasoning effort
+        self.support_reasoning_models = SUPPORT_REASONING_EFFORT_MODELS
+
     def prepare_logs(self, response, system, user, resp, finish_reason):
         response_log = response.dict().copy()
         response_log['system'] = system
@@ -230,6 +233,13 @@ class LiteLLMAIHandler(BaseAiHandler):
             if model not in self.no_support_temperature_models and not get_settings().config.custom_reasoning_model:
                 kwargs["temperature"] = temperature
+            # Add reasoning_effort if model supports it
+            if (model in self.support_reasoning_models):
+                supported_reasoning_efforts = [ReasoningEffort.HIGH.value, ReasoningEffort.MEDIUM.value, ReasoningEffort.LOW.value]
+                reasoning_effort = get_settings().config.reasoning_effort if (get_settings().config.reasoning_effort in supported_reasoning_efforts) else ReasoningEffort.MEDIUM.value
+                get_logger().info(f"Add reasoning_effort with value {reasoning_effort} to model {model}.")
+                kwargs["reasoning_effort"] = reasoning_effort
+
             if get_settings().litellm.get("enable_callbacks", False):
                 kwargs = self.add_litellm_callbacks(kwargs)
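
In effect, when the configured reasoning_effort setting is one of the supported values it is forwarded to litellm unchanged; anything else falls back to medium. A small standalone sketch of that fallback rule (the function name and literal values are illustrative, not part of the commit):

    SUPPORTED_EFFORTS = {"high", "medium", "low"}  # assumed ReasoningEffort values

    def resolve_reasoning_effort(configured: str) -> str:
        # Fall back to "medium" when the configured value is not a supported level.
        return configured if configured in SUPPORTED_EFFORTS else "medium"

    # An unrecognized value such as "max" resolves to "medium"; the result is then
    # passed along to litellm as kwargs["reasoning_effort"] for supported models.
    assert resolve_reasoning_effort("high") == "high"
    assert resolve_reasoning_effort("max") == "medium"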