Compare commits


2 Commits

4 changed files with 25 additions and 21 deletions

View File

@@ -85,7 +85,19 @@ publish_labels = true
 </tr>
 <tr>
 <td><b>collapsible_file_list</b></td>
-<td>If set to true, the file list in the "Changes walkthrough" section will be collapsible. If set to "adaptive", the file list will be collapsible only if there are more than 8 files. Default is "adaptive".</td>
+<td>If set to true, the file list in the "Changes walkthrough" section will be collapsible. If set to "adaptive", the file list will be collapsible only if the number of files exceeds the threshold defined by `collapsible_file_list_threshold`. Default is "adaptive".</td>
+</tr>
+<tr>
+<td><b>collapsible_file_list_threshold</b></td>
+<td>The number of files above which the file list becomes collapsible in 'adaptive' mode. Default is 8.</td>
+</tr>
+<tr>
+<td><b>inline_file_summary</b></td>
+<td>Controls where and how file summaries are displayed:
+<br>- 'table': displays the file changes walkthrough table on both the "Files changed" and "Conversation" tabs
+<br>- true: adds a collapsible file comment with the changes title and summary for each file
+<br>- false: shows the file changes walkthrough only in the "Conversation" tab
+<br>Default is false.</td>
 </tr>
 <tr>
 <td><b>enable_large_pr_handling</b></td>
@@ -95,6 +107,14 @@ publish_labels = true
 <tr>
 <td><b>enable_help_text</b></td>
 <td>If set to true, the tool will display a help text in the comment. Default is false.</td>
 </tr>
+<tr>
+<td><b>enable_help_comment</b></td>
+<td>If set to true, the help guidance text is displayed in the comment. Default is true.</td>
+</tr>
+<tr>
+<td><b>enable_override_title_with_parent_tag</b></td>
+<td>If set to true, the tool will set the PR title to the next patch version from the latest tag in the target branch, with an RC suffix (e.g., v1.0.1-rc.1). Default is false.</td>
+</tr>
 </table>
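
For orientation, here is a minimal sketch of how the options documented above could be set in a pr-agent TOML configuration file. The `[pr_description]` section name and the specific values are illustrative assumptions, not taken from this diff:

```toml
[pr_description]                      # assumed section for the describe tool
publish_labels = true
collapsible_file_list = "adaptive"    # collapse the file list only above the threshold below
collapsible_file_list_threshold = 8   # file count that triggers collapsing in "adaptive" mode
inline_file_summary = false           # alternatives: true, or "table"
```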

View File

@@ -85,19 +85,11 @@ MAX_TOKENS = {
 }
 
 USER_MESSAGE_ONLY_MODELS = [
-    "deepseek/deepseek-reasoner",
-    "o1-mini",
-    "o1-mini-2024-09-12",
-    "o1-preview"
-]
-
-NO_SUPPORT_TEMPERATURE_MODELS = [
     "deepseek/deepseek-reasoner",
     "o1-mini",
     "o1-mini-2024-09-12",
     "o1",
     "o1-2024-12-17",
     "o3-mini",
-    "o3-mini-2025-01-31",
-    "o1-preview"
+    "o3-mini-2025-01-31"
 ]

View File

@@ -6,7 +6,7 @@ import requests
 from litellm import acompletion
 from tenacity import retry, retry_if_exception_type, stop_after_attempt
 
-from pr_agent.algo import NO_SUPPORT_TEMPERATURE_MODELS, USER_MESSAGE_ONLY_MODELS
+from pr_agent.algo import USER_MESSAGE_ONLY_MODELS
 from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler
 from pr_agent.algo.utils import get_version
 from pr_agent.config_loader import get_settings
@@ -98,9 +98,6 @@ class LiteLLMAIHandler(BaseAiHandler):
         # Models that only use the user message
         self.user_message_only_models = USER_MESSAGE_ONLY_MODELS
 
-        # Models that don't support the temperature argument
-        self.no_support_temperature_models = NO_SUPPORT_TEMPERATURE_MODELS
-
     def prepare_logs(self, response, system, user, resp, finish_reason):
         response_log = response.dict().copy()
         response_log['system'] = system
@@ -205,7 +202,7 @@ class LiteLLMAIHandler(BaseAiHandler):
                     {"type": "image_url", "image_url": {"url": img_path}}]
 
             # Currently, some models do not support separate system and user prompts
-            if model in self.user_message_only_models:
+            if self.user_message_only_models and any(entry.lower() in model.lower() for entry in self.user_message_only_models):
                 user = f"{system}\n\n\n{user}"
                 system = ""
                 get_logger().info(f"Using model {model}, combining system and user prompts")
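
The practical difference in the hunk above: exact list membership is replaced by a case-insensitive substring test, so provider-prefixed or dated model names still match a listed entry. A self-contained sketch under that assumption (the function name and the trimmed list are illustrative, not from the codebase):

```python
USER_MESSAGE_ONLY_MODELS = ["deepseek/deepseek-reasoner", "o1-mini", "o1-preview"]

def combine_if_user_message_only(model: str, system: str, user: str) -> tuple[str, str]:
    # Case-insensitive substring match, mirroring the new condition in the diff.
    if USER_MESSAGE_ONLY_MODELS and any(entry.lower() in model.lower() for entry in USER_MESSAGE_ONLY_MODELS):
        # Models that only accept user messages get the system prompt
        # folded into the user prompt.
        return "", f"{system}\n\n\n{user}"
    return system, user

# A provider-prefixed, dated variant matches via the "o1-mini" substring,
# even though it is not an exact entry in the list:
assert combine_if_user_message_only("openai/o1-mini-2024-09-12", "sys", "usr") == ("", "sys\n\n\nusr")
# Unlisted models pass through unchanged:
assert combine_if_user_message_only("gpt-4o", "sys", "usr") == ("sys", "usr")
```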
@@ -222,14 +219,11 @@ class LiteLLMAIHandler(BaseAiHandler):
                 "model": model,
                 "deployment_id": deployment_id,
                 "messages": messages,
+                "temperature": temperature,
                 "timeout": get_settings().config.ai_timeout,
                 "api_base": self.api_base,
             }
 
-            # Add temperature only if model supports it
-            if model not in self.no_support_temperature_models:
-                kwargs["temperature"] = temperature
-
             if get_settings().litellm.get("enable_callbacks", False):
                 kwargs = self.add_litellm_callbacks(kwargs)
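
Net effect of this hunk, as a minimal sketch (the helper name and parameter types are assumptions): `temperature` is sent on every request now that the per-model gate backed by the removed `NO_SUPPORT_TEMPERATURE_MODELS` list is gone:

```python
from litellm import acompletion

async def complete(model: str, messages: list, temperature: float, timeout: float):
    kwargs = {
        "model": model,
        "messages": messages,
        "temperature": temperature,  # always included; no longer gated per model
        "timeout": timeout,
    }
    # A model that rejects the temperature parameter would now surface a
    # provider-side error rather than having the argument silently omitted.
    return await acompletion(**kwargs)
```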

View File

@@ -3,5 +3,3 @@
 from setuptools import setup
 
 setup()
-
-print("Hi")