Mirror of https://github.com/qodo-ai/pr-agent.git (synced 2025-07-04 12:50:38 +08:00)
Add is_auto_command config check to conditionally publish output and update GitHub App to set this flag
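Note: a minimal, self-contained sketch of the flag's lifecycle as this commit wires it up, using a stand-in config object rather than pr-agent's real dynaconf-backed settings:

# Stand-in for get_settings().config: attribute access plus a dict-style get().
class ConfigStub:
    def __init__(self):
        self.publish_output = True
        self.is_auto_command = False   # new default added to configuration.toml below

    def get(self, key, default=None):
        return getattr(self, key, default)

config = ConfigStub()
config.is_auto_command = True          # what the GitHub App now does for auto-triggered PRs

# The check the tools now run before posting temporary "Preparing ..." comments:
print(config.publish_output and not config.get("is_auto_command", False))   # False -> suppressed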
@@ -128,6 +128,7 @@ async def handle_new_pr_opened(body: Dict[str, Any],
                                log_context: Dict[str, Any],
                                agent: PRAgent):
     title = body.get("pull_request", {}).get("title", "")
+    get_settings().config.is_auto_command = True
 
     # logic to ignore PRs with specific titles (e.g. "[Auto] ...")
     ignore_pr_title_re = get_settings().get("GITHUB_APP.IGNORE_PR_TITLE", [])
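The assignment above only runs on the automatic "PR opened" path. A hypothetical contrast of the two trigger paths (handler names invented for illustration, not taken from the repo):

def run_tool(settings: dict) -> str:
    # same gate the tools below apply
    if settings.get("publish_output") and not settings.get("is_auto_command", False):
        return "temporary progress comment posted"
    return "temporary progress comment suppressed"

def on_pr_opened_auto(settings: dict) -> str:
    settings["is_auto_command"] = True      # mirrors the line added in this hunk
    return run_tool(settings)

def on_user_command(settings: dict) -> str:
    return run_tool(settings)               # configured default (false) is kept

print(on_pr_opened_auto({"publish_output": True, "is_auto_command": False}))  # suppressed
print(on_user_command({"publish_output": True, "is_auto_command": False}))    # posted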
@@ -21,6 +21,7 @@ ai_disclaimer_title=""  # Pro feature, title for a collapsible disclaimer to AI
 ai_disclaimer=""  # Pro feature, full text for the AI disclaimer
 output_relevant_configurations=false
 large_patch_policy = "clip" # "clip", "skip"
+is_auto_command=false
 
 [pr_reviewer] # /review #
 # enable/disable features
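The new key defaults to false, so behavior is unchanged unless the flag is flipped at runtime. An illustrative truth table for the gate (not from the repo's test suite):

def should_publish_temp_comment(publish_output: bool, is_auto_command: bool) -> bool:
    return publish_output and not is_auto_command

for publish_output in (True, False):
    for is_auto in (True, False):
        print(publish_output, is_auto, "->", should_publish_temp_comment(publish_output, is_auto))

# Expected:
# True True   -> False  (auto command: temporary comment suppressed)
# True False  -> True   (manual command: temporary comment posted)
# False True  -> False  (publishing disabled entirely)
# False False -> False  (publishing disabled entirely)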
@@ -76,7 +76,8 @@ class PRCodeSuggestions:
         relevant_configs = {'pr_code_suggestions': dict(get_settings().pr_code_suggestions),
                             'config': dict(get_settings().config)}
         get_logger().debug("Relevant configs", artifacts=relevant_configs)
-        if get_settings().config.publish_output and get_settings().config.publish_output_progress:
+        if (get_settings().config.publish_output and get_settings().config.publish_output_progress and
+                not get_settings().config.get('is_auto_command', False)):
             if self.git_provider.is_supported("gfm_markdown"):
                 self.progress_response = self.git_provider.publish_comment(self.progress)
             else:
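The same publish_output / is_auto_command check recurs in PRDescription and PRReviewer below; here it additionally requires publish_output_progress. A hypothetical helper that captures the pattern (not part of this commit; the function name and the stand-in config are invented):

from types import SimpleNamespace

def should_publish_progress(config, require_progress_flag: bool = False) -> bool:
    # config mirrors get_settings().config: attributes plus defaults for optional keys
    if not config.publish_output:
        return False
    if require_progress_flag and not getattr(config, "publish_output_progress", True):
        return False
    return not getattr(config, "is_auto_command", False)

cfg = SimpleNamespace(publish_output=True, publish_output_progress=True, is_auto_command=True)
print(should_publish_progress(cfg, require_progress_flag=True))   # False: auto-triggered command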
@@ -78,7 +78,7 @@ class PRDescription:
         relevant_configs = {'pr_description': dict(get_settings().pr_description),
                             'config': dict(get_settings().config)}
         get_logger().debug("Relevant configs", artifacts=relevant_configs)
-        if get_settings().config.publish_output:
+        if get_settings().config.publish_output and not get_settings().config.get('is_auto_command', False):
             self.git_provider.publish_comment("Preparing PR description...", is_temporary=True)
 
         await retry_with_fallback_models(self._prepare_prediction, ModelType.TURBO)
@@ -118,7 +118,7 @@ class PRReviewer:
                 f"No files were changed since the [previous PR Review]({previous_review_url})")
             return None
 
-        if get_settings().config.publish_output:
+        if get_settings().config.publish_output and not get_settings().config.get('is_auto_command', False):
             self.git_provider.publish_comment("Preparing review...", is_temporary=True)
 
         await retry_with_fallback_models(self._prepare_prediction)