Mirror of https://github.com/qodo-ai/pr-agent.git (synced 2025-07-17 11:00:39 +08:00)

Merge remote-tracking branch 'origin/main' into tr/improve_tweaks
@@ -74,11 +74,11 @@ CodiumAI PR-Agent is an open-source tool to help efficiently review and handle p
 | | Ask | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
 | | ⮑ [Ask on code lines](./docs/ASK.md#ask-lines) | :white_check_mark: | :white_check_mark: | | |
 | | [Custom Suggestions](https://github.com/Codium-ai/pr-agent/blob/main/docs/CUSTOM_SUGGESTIONS.md) 💎 | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
-| | [Test](https://github.com/Codium-ai/pr-agent/blob/main/docs/TEST.md) 💎 | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
+| | [Test](https://github.com/Codium-ai/pr-agent/blob/main/docs/TEST.md) 💎 | :white_check_mark: | :white_check_mark: | | :white_check_mark: |
 | | Reflect and Review | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
 | | Update CHANGELOG.md | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
 | | Find Similar Issue | :white_check_mark: | | | |
-| | [Add PR Documentation](https://github.com/Codium-ai/pr-agent/blob/main/docs/ADD_DOCUMENTATION.md) 💎 | :white_check_mark: | :white_check_mark: | :white_check_mark: | :white_check_mark: |
+| | [Add PR Documentation](https://github.com/Codium-ai/pr-agent/blob/main/docs/ADD_DOCUMENTATION.md) 💎 | :white_check_mark: | :white_check_mark: | | :white_check_mark: |
 | | [Custom Labels](https://github.com/Codium-ai/pr-agent/blob/main/docs/DESCRIBE.md#handle-custom-labels-from-the-repos-labels-page-gem) 💎 | :white_check_mark: | :white_check_mark: | | :white_check_mark: |
 | | [Analyze](https://github.com/Codium-ai/pr-agent/blob/main/docs/Analyze.md) 💎 | :white_check_mark: | :white_check_mark: | | :white_check_mark: |
 | | [CI Feedback](https://github.com/Codium-ai/pr-agent/blob/main/docs/CI_FEEDBACK.md) 💎 | :white_check_mark: | | | |
Usage.md (7 changed lines)

@@ -174,6 +174,13 @@ To cancel the automatic run of all the tools, set:
 handle_pr_actions = []
 ```
 
+You can also disable automatic runs for PRs with specific titles, by setting the `ignore_pr_titles` parameter with the relevant regex. For example:
+```
+[github_app]
+ignore_pr_title = ["^[Auto]", ".*ignore.*"]
+```
+will ignore PRs with titles that start with "Auto" or contain the word "ignore".
+
 ##### GitHub app automatic tools for push actions (commits to an open PR)
 In addition to running automatic tools when a PR is opened, the GitHub app can also respond to new code that is pushed to an open PR.
 
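The `ignore_pr_title` entries above are plain regular expressions matched against the PR title. As a minimal sketch of that kind of check (the helper name and the hard-coded pattern list here are illustrative only, not the pr-agent implementation):

```python
import re
from typing import List


def should_skip_pr(title: str, ignore_patterns: List[str]) -> bool:
    # Skip the PR if its title matches any configured regex.
    return any(re.search(pattern, title) for pattern in ignore_patterns)


patterns = ["^[Auto]", ".*ignore.*"]
print(should_skip_pr("Auto-update dependencies", patterns))  # True  ('A' matches ^[Auto])
print(should_skip_pr("Please ignore this draft", patterns))  # True  (contains 'ignore')
print(should_skip_pr("Fix login bug", patterns))             # False
```

Note that `^[Auto]` is a character class, so strictly it matches any title whose first character is `A`, `u`, `t`, or `o`; `^Auto` would be the literal-prefix form.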
@@ -3,7 +3,6 @@
 ## Table of Contents
 - [Overview](#overview)
 - [Configuration options](#configuration-options)
-- [Summarize mode](#summarize-mode)
 - [Usage Tips](#usage-tips)
 - [Extra instructions](#extra-instructions)
 - [PR footprint - regular vs summarize mode](#pr-footprint---regular-vs-summarize-mode)

@@ -16,7 +15,7 @@ The tool can be triggered automatically every time a new PR is [opened](https://
 /improve
 ```
 
-### Summarized vs commitable code suggestions
+### Summarized vs committable code suggestions
 
 The code suggestions can be presented as a single comment (via `pr_code_suggestions.summarize=true`):
 ___
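For readers who want the single-comment presentation mentioned above: the same flag can be set persistently in the `[pr_code_suggestions]` section of the configuration, or passed inline, e.g. `/improve --pr_code_suggestions.summarize=true`, following the `--<section>.<key>=<value>` argument convention used elsewhere in this commit's configuration examples. The inline spelling here is an illustration rather than a quote from the docs.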
@@ -74,6 +74,7 @@ class PRAgent:
 
         action = action.lstrip("/").lower()
         with get_logger().contextualize(command=action):
+            get_logger().info("PR-Agent request handler started", analytics=True)
            if action == "reflect_and_review":
                get_settings().pr_reviewer.ask_and_reflect = True
            if action == "answer":
@@ -190,6 +190,8 @@ class GitProvider(ABC):
     def auto_approve(self) -> bool:
         return False
 
+    def calc_pr_statistics(self, pull_request_data: dict):
+        return {}
 
 
 def get_main_pr_language(languages, files) -> str:
@@ -700,4 +700,25 @@ class GithubProvider(GitProvider):
             return False
         except Exception as e:
             get_logger().exception(f"Failed to auto-approve, error: {e}")
             return False
 
+    def calc_pr_statistics(self, pull_request_data: dict):
+        try:
+            out = {}
+            from datetime import datetime
+            created_at = pull_request_data['created_at']
+            closed_at = pull_request_data['closed_at']
+            closed_at_datetime = datetime.strptime(closed_at, "%Y-%m-%dT%H:%M:%SZ")
+            created_at_datetime = datetime.strptime(created_at, "%Y-%m-%dT%H:%M:%SZ")
+            difference = closed_at_datetime - created_at_datetime
+            out['hours'] = difference.total_seconds() / 3600
+            out['commits'] = pull_request_data['commits']
+            out['comments'] = pull_request_data['comments']
+            out['review_comments'] = pull_request_data['review_comments']
+            out['changed_files'] = pull_request_data['changed_files']
+            out['additions'] = pull_request_data['additions']
+            out['deletions'] = pull_request_data['deletions']
+        except Exception as e:
+            get_logger().exception(f"Failed to calculate PR statistics, error: {e}")
+            return {}
+        return out
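To make the new statistics concrete, here is roughly what the method above would return for a typical closed-PR payload; the values are invented for illustration, only the field names come from the diff:

```python
pull_request_data = {
    "created_at": "2024-01-10T09:00:00Z",
    "closed_at": "2024-01-11T15:30:00Z",
    "commits": 4,
    "comments": 2,
    "review_comments": 5,
    "changed_files": 3,
    "additions": 120,
    "deletions": 45,
}

# Applying the same arithmetic as calc_pr_statistics above:
# 30.5 hours open, plus the raw counters copied through unchanged ->
# {'hours': 30.5, 'commits': 4, 'comments': 2, 'review_comments': 5,
#  'changed_files': 3, 'additions': 120, 'deletions': 45}
```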
pr_agent/identity_providers/__init__.py (new file, 13 lines)

@@ -0,0 +1,13 @@
+from pr_agent.config_loader import get_settings
+from pr_agent.identity_providers.default_identity_provider import DefaultIdentityProvider
+
+_IDENTITY_PROVIDERS = {
+    'default': DefaultIdentityProvider
+}
+
+
+def get_identity_provider():
+    identity_provider_id = get_settings().get("CONFIG.IDENTITY_PROVIDER", "default")
+    if identity_provider_id not in _IDENTITY_PROVIDERS:
+        raise ValueError(f"Unknown identity provider: {identity_provider_id}")
+    return _IDENTITY_PROVIDERS[identity_provider_id]()
pr_agent/identity_providers/default_identity_provider.py (new file, 9 lines)

@@ -0,0 +1,9 @@
+from pr_agent.identity_providers.identity_provider import Eligibility, IdentityProvider
+
+
+class DefaultIdentityProvider(IdentityProvider):
+    def verify_eligibility(self, git_provider, git_provider_id, pr_url):
+        return Eligibility.ELIGIBLE
+
+    def inc_invocation_count(self, git_provider, git_provider_id):
+        pass
pr_agent/identity_providers/identity_provider.py (new file, 18 lines)

@@ -0,0 +1,18 @@
+from abc import ABC, abstractmethod
+from enum import Enum
+
+
+class Eligibility(Enum):
+    NOT_ELIGIBLE = 0
+    ELIGIBLE = 1
+    TRIAL = 2
+
+
+class IdentityProvider(ABC):
+    @abstractmethod
+    def verify_eligibility(self, git_provider, git_provier_id, pr_url):
+        pass
+
+    @abstractmethod
+    def inc_invocation_count(self, git_provider, git_provider_id):
+        pass
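These three files define the plug-in point the webhook servers use below: they call `get_identity_provider().verify_eligibility(...)` and skip work when it returns `Eligibility.NOT_ELIGIBLE`, and `CONFIG.IDENTITY_PROVIDER` selects which provider is instantiated. As a hedged illustration of a custom provider (the quota logic and class name are invented; only the interface comes from the new files above):

```python
from pr_agent.identity_providers.identity_provider import Eligibility, IdentityProvider


class QuotaIdentityProvider(IdentityProvider):
    """Illustrative provider: block callers that exceeded a fixed invocation quota."""

    def __init__(self, max_calls: int = 100):
        self.max_calls = max_calls
        self._counts = {}  # (git_provider, git_provider_id) -> invocation count

    def verify_eligibility(self, git_provider, git_provider_id, pr_url):
        used = self._counts.get((git_provider, git_provider_id), 0)
        return Eligibility.ELIGIBLE if used < self.max_calls else Eligibility.NOT_ELIGIBLE

    def inc_invocation_count(self, git_provider, git_provider_id):
        key = (git_provider, git_provider_id)
        self._counts[key] = self._counts.get(key, 0) + 1
```

Wiring such a class in would mean adding an entry to `_IDENTITY_PROVIDERS` in `pr_agent/identity_providers/__init__.py` and pointing `CONFIG.IDENTITY_PROVIDER` at the new key.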
@@ -22,6 +22,10 @@ def analytics_filter(record: dict) -> bool:
     return record.get("extra", {}).get("analytics", False)
 
 
+def inv_analytics_filter(record: dict) -> bool:
+    return not record.get("extra", {}).get("analytics", False)
+
+
 def setup_logger(level: str = "INFO", fmt: LoggingFormat = LoggingFormat.CONSOLE):
     level: int = logging.getLevelName(level.upper())
     if type(level) is not int:

@@ -31,6 +35,7 @@ def setup_logger(level: str = "INFO", fmt: LoggingFormat = LoggingFormat.CONSOLE
         logger.remove(None)
         logger.add(
             sys.stdout,
+            filter=inv_analytics_filter,
             level=level,
             format="{message}",
             colorize=False,

@@ -38,7 +43,7 @@ def setup_logger(level: str = "INFO", fmt: LoggingFormat = LoggingFormat.CONSOLE
         )
     elif fmt == LoggingFormat.CONSOLE: # does not print the 'extra' fields
         logger.remove(None)
-        logger.add(sys.stdout, level=level, colorize=True)
+        logger.add(sys.stdout, level=level, colorize=True, filter=inv_analytics_filter)
 
     log_folder = get_settings().get("CONFIG.ANALYTICS_FOLDER", "")
     if log_folder:
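The net effect of the changes above is a split log stream: records whose `extra` carries `analytics=True` are excluded from the stdout sinks by `inv_analytics_filter` and, when `CONFIG.ANALYTICS_FOLDER` is configured, are presumably captured by a dedicated analytics sink further down in `setup_logger`. A self-contained loguru sketch of the same routing idea (the sink path and messages are illustrative):

```python
import sys

from loguru import logger


def analytics_filter(record: dict) -> bool:
    return record.get("extra", {}).get("analytics", False)


def inv_analytics_filter(record: dict) -> bool:
    return not record.get("extra", {}).get("analytics", False)


logger.remove()
logger.add(sys.stdout, filter=inv_analytics_filter)                           # regular output
logger.add("analytics/usage.jsonl", filter=analytics_filter, serialize=True)  # analytics-only sink

logger.info("normal log line")                    # stdout only
logger.bind(analytics=True).info("usage event")   # analytics file only
```

Calls like `get_logger().info(..., analytics=True)` elsewhere in this commit appear to rely on loguru placing extra keyword arguments into `record["extra"]`, which is exactly what the two filters inspect.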
@@ -1,3 +1,4 @@
+import base64
 import copy
 import hashlib
 import json

@@ -17,6 +18,8 @@ from starlette_context.middleware import RawContextMiddleware
 from pr_agent.agent.pr_agent import PRAgent
 from pr_agent.config_loader import get_settings, global_settings
 from pr_agent.git_providers.utils import apply_repo_settings
+from pr_agent.identity_providers import get_identity_provider
+from pr_agent.identity_providers.identity_provider import Eligibility
 from pr_agent.log import LoggingFormat, get_logger, setup_logger
 from pr_agent.secret_providers import get_secret_provider
 from pr_agent.servers.github_action_runner import get_setting_or_env, is_true

@@ -80,11 +83,27 @@ async def handle_github_webhooks(background_tasks: BackgroundTasks, request: Req
     get_logger().debug(data)
     async def inner():
         try:
-            owner = data["data"]["repository"]["owner"]["username"]
+            try:
+                if data["data"]["actor"]["type"] != "user":
+                    return "OK"
+            except KeyError:
+                get_logger().error("Failed to get actor type, check previous logs, this shouldn't happen.")
+            try:
+                owner = data["data"]["repository"]["owner"]["username"]
+            except Exception as e:
+                get_logger().error(f"Failed to get owner, will continue: {e}")
+                owner = "unknown"
+            sender_id = data["data"]["actor"]["account_id"]
             log_context["sender"] = owner
-            secrets = json.loads(secret_provider.get_secret(owner))
+            log_context["sender_id"] = sender_id
+            jwt_parts = input_jwt.split(".")
+            claim_part = jwt_parts[1]
+            claim_part += "=" * (-len(claim_part) % 4)
+            decoded_claims = base64.urlsafe_b64decode(claim_part)
+            claims = json.loads(decoded_claims)
+            client_key = claims["iss"]
+            secrets = json.loads(secret_provider.get_secret(client_key))
             shared_secret = secrets["shared_secret"]
-            client_key = secrets["client_key"]
             jwt.decode(input_jwt, shared_secret, audience=client_key, algorithms=["HS256"])
             bearer_token = await get_bearer_token(shared_secret, client_key)
             context['bitbucket_bearer_token'] = bearer_token

@@ -98,15 +117,17 @@ async def handle_github_webhooks(background_tasks: BackgroundTasks, request: Req
                 if pr_url:
                     with get_logger().contextualize(**log_context):
                         apply_repo_settings(pr_url)
-                        auto_review = get_setting_or_env("BITBUCKET_APP.AUTO_REVIEW", None)
-                        if auto_review is None or is_true(auto_review): # by default, auto review is enabled
-                            await PRReviewer(pr_url).run()
-                        auto_improve = get_setting_or_env("BITBUCKET_APP.AUTO_IMPROVE", None)
-                        if is_true(auto_improve): # by default, auto improve is disabled
-                            await PRCodeSuggestions(pr_url).run()
-                        auto_describe = get_setting_or_env("BITBUCKET_APP.AUTO_DESCRIBE", None)
-                        if is_true(auto_describe): # by default, auto describe is disabled
-                            await PRDescription(pr_url).run()
+                        if get_identity_provider().verify_eligibility("bitbucket",
+                                                                      sender_id, pr_url) is not Eligibility.NOT_ELIGIBLE:
+                            auto_review = get_setting_or_env("BITBUCKET_APP.AUTO_REVIEW", None)
+                            if auto_review is None or is_true(auto_review): # by default, auto review is enabled
+                                await PRReviewer(pr_url).run()
+                            auto_improve = get_setting_or_env("BITBUCKET_APP.AUTO_IMPROVE", None)
+                            if is_true(auto_improve): # by default, auto improve is disabled
+                                await PRCodeSuggestions(pr_url).run()
+                            auto_describe = get_setting_or_env("BITBUCKET_APP.AUTO_DESCRIBE", None)
+                            if is_true(auto_describe): # by default, auto describe is disabled
+                                await PRDescription(pr_url).run()
                 # with get_logger().contextualize(**log_context):
                 # await agent.handle_request(pr_url, "review")
            elif event == "pullrequest:comment_created":

@@ -115,7 +136,9 @@ async def handle_github_webhooks(background_tasks: BackgroundTasks, request: Req
                 log_context["event"] = "comment"
                 comment_body = data["data"]["comment"]["content"]["raw"]
                 with get_logger().contextualize(**log_context):
-                    await agent.handle_request(pr_url, comment_body)
+                    if get_identity_provider().verify_eligibility("bitbucket",
+                                                                  sender_id, pr_url) is not Eligibility.NOT_ELIGIBLE:
+                        await agent.handle_request(pr_url, comment_body)
         except Exception as e:
             get_logger().error(f"Failed to handle webhook: {e}")
     background_tasks.add_task(inner)
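The webhook handler above now derives the tenant's `client_key` from the incoming Atlassian JWT instead of the stored secret: it base64url-decodes the middle (claims) segment, re-adds the stripped padding, and reads the `iss` claim before verifying the signature. A standalone sketch of that decoding step (the sample token below is fabricated and unsigned, purely to show the padding/`iss` mechanics):

```python
import base64
import json


def issuer_from_jwt(input_jwt: str) -> str:
    """Read the 'iss' claim from a JWT payload without verifying the signature."""
    claim_part = input_jwt.split(".")[1]
    claim_part += "=" * (-len(claim_part) % 4)   # restore the base64url padding that JWTs strip
    claims = json.loads(base64.urlsafe_b64decode(claim_part))
    return claims["iss"]


# Fabricated, unsigned token (header.payload.signature) just to exercise the helper:
payload = base64.urlsafe_b64encode(json.dumps({"iss": "client-key-123"}).encode()).rstrip(b"=")
token = "eyJhbGciOiJIUzI1NiJ9." + payload.decode() + ".sig"
print(issuer_from_jwt(token))  # client-key-123
```

In the handler itself the token is still verified immediately afterwards with `jwt.decode(input_jwt, shared_secret, audience=client_key, algorithms=["HS256"])`, so the unverified read only decides which tenant's shared secret to fetch.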
@@ -3,7 +3,7 @@ import copy
 import os
 import re
 import uuid
-from typing import Any, Dict, List, Tuple
+from typing import Any, Dict, Tuple
 
 import uvicorn
 from fastapi import APIRouter, FastAPI, HTTPException, Request, Response

@@ -17,11 +17,19 @@ from pr_agent.config_loader import get_settings, global_settings
 from pr_agent.git_providers import get_git_provider
 from pr_agent.git_providers.git_provider import IncrementalPR
 from pr_agent.git_providers.utils import apply_repo_settings
+from pr_agent.identity_providers import get_identity_provider
+from pr_agent.identity_providers.identity_provider import Eligibility
 from pr_agent.log import LoggingFormat, get_logger, setup_logger
 from pr_agent.servers.utils import DefaultDictWithTimeout, verify_signature
 
 setup_logger(fmt=LoggingFormat.JSON)
+base_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
+build_number_path = os.path.join(base_path, "build_number.txt")
+if os.path.exists(build_number_path):
+    with open(build_number_path) as f:
+        build_number = f.read().strip()
+else:
+    build_number = "unknown"
 router = APIRouter()
 
 

@@ -70,6 +78,7 @@ _pending_task_duplicate_push_conditions = DefaultDictWithTimeout(asyncio.locks.C
 async def handle_comments_on_pr(body: Dict[str, Any],
                                 event: str,
                                 sender: str,
+                                sender_id: str,
                                 action: str,
                                 log_context: Dict[str, Any],
                                 agent: PRAgent):

@@ -98,13 +107,15 @@ async def handle_comments_on_pr(body: Dict[str, Any],
     comment_id = body.get("comment", {}).get("id")
     provider = get_git_provider()(pr_url=api_url)
     with get_logger().contextualize(**log_context):
-        get_logger().info(f"Processing comment on PR {api_url=}, comment_body={comment_body}")
-        await agent.handle_request(api_url, comment_body,
-                                   notify=lambda: provider.add_eyes_reaction(comment_id, disable_eyes=disable_eyes))
+        if get_identity_provider().verify_eligibility("github", sender_id, api_url) is not Eligibility.NOT_ELIGIBLE:
+            get_logger().info(f"Processing comment on PR {api_url=}, comment_body={comment_body}")
+            await agent.handle_request(api_url, comment_body,
+                                       notify=lambda: provider.add_eyes_reaction(comment_id, disable_eyes=disable_eyes))
 
 async def handle_new_pr_opened(body: Dict[str, Any],
                                event: str,
                                sender: str,
+                               sender_id: str,
                                action: str,
                                log_context: Dict[str, Any],
                                agent: PRAgent):

@@ -123,11 +134,13 @@ async def handle_new_pr_opened(body: Dict[str, Any],
         get_logger().info(f"Invalid PR event: {action=} {api_url=}")
         return {}
     if action in get_settings().github_app.handle_pr_actions: # ['opened', 'reopened', 'ready_for_review', 'review_requested']
-        await _perform_auto_commands_github("pr_commands", agent, body, api_url, log_context)
+        if get_identity_provider().verify_eligibility("github", sender_id, api_url) is not Eligibility.NOT_ELIGIBLE:
+            await _perform_auto_commands_github("pr_commands", agent, body, api_url, log_context)
 
 async def handle_push_trigger_for_new_commits(body: Dict[str, Any],
                                               event: str,
                                               sender: str,
+                                              sender_id: str,
                                              action: str,
                                              log_context: Dict[str, Any],
                                              agent: PRAgent):

@@ -182,8 +195,9 @@ async def handle_push_trigger_for_new_commits(body: Dict[str, Any],
                                        True)).previous_review:
                 get_logger().info(f"Skipping incremental review because there was no initial review for {api_url=} yet")
                 return {}
-            get_logger().info(f"Performing incremental review for {api_url=} because of {event=} and {action=}")
-            await _perform_auto_commands_github("push_commands", agent, body, api_url, log_context)
+            if get_identity_provider().verify_eligibility("github", sender_id, api_url) is not Eligibility.NOT_ELIGIBLE:
+                get_logger().info(f"Performing incremental review for {api_url=} because of {event=} and {action=}")
+                await _perform_auto_commands_github("push_commands", agent, body, api_url, log_context)
 
     finally:
         # release the waiting task block

@@ -192,6 +206,18 @@ async def handle_push_trigger_for_new_commits(body: Dict[str, Any],
             _duplicate_push_triggers[api_url] -= 1
 
 
+def handle_closed_pr(body, event, action, log_context):
+    pull_request = body.get("pull_request", {})
+    is_merged = pull_request.get("merged", False)
+    if not is_merged:
+        return
+    api_url = pull_request.get("url", "")
+    pr_statistics = get_git_provider()(pr_url=api_url).calc_pr_statistics(pull_request)
+    with get_logger().contextualize(**log_context):
+        with get_logger().contextualize(pr_statistics=pr_statistics):
+            get_logger().info("PR-Agent statistics for closed PR", analytics=True)
+
+
 async def handle_request(body: Dict[str, Any], event: str):
     """
     Handle incoming GitHub webhook requests.
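The nested `contextualize` calls in `handle_closed_pr` are what attach the per-PR numbers to the analytics record: the surrounding request context plus the `pr_statistics` dict all travel in the `extra` fields of a single `analytics=True` log line. A compact loguru illustration of that layering (the field values are invented):

```python
from loguru import logger

log_context = {"event": "pull_request", "action": "closed", "sender": "octocat"}
pr_statistics = {"hours": 30.5, "commits": 4, "additions": 120, "deletions": 45}

with logger.contextualize(**log_context):
    with logger.contextualize(pr_statistics=pr_statistics):
        # record["extra"] now carries event, action, sender and pr_statistics together
        logger.bind(analytics=True).info("PR-Agent statistics for closed PR")
```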
@@ -205,24 +231,29 @@ async def handle_request(body: Dict[str, Any], event: str):
         return {}
     agent = PRAgent()
     sender = body.get("sender", {}).get("login")
+    sender_id = body.get("sender", {}).get("id")
+    app_name = get_settings().get("CONFIG.APP_NAME", "Unknown")
     log_context = {"action": action, "event": event, "sender": sender, "server_type": "github_app",
-                   "request_id": uuid.uuid4().hex}
+                   "request_id": uuid.uuid4().hex, "build_number": build_number, "app_name": app_name}
 
     # handle comments on PRs
     if action == 'created':
         get_logger().debug(f'Request body', artifact=body)
-        await handle_comments_on_pr(body, event, sender, action, log_context, agent)
+        await handle_comments_on_pr(body, event, sender, sender_id, action, log_context, agent)
     # handle new PRs
-    elif event == 'pull_request' and action != 'synchronize':
+    elif event == 'pull_request' and action != 'synchronize' and action != 'closed':
         get_logger().debug(f'Request body', artifact=body)
-        await handle_new_pr_opened(body, event, sender, action, log_context, agent)
+        await handle_new_pr_opened(body, event, sender, sender_id, action, log_context, agent)
     # handle pull_request event with synchronize action - "push trigger" for new commits
     elif event == 'pull_request' and action == 'synchronize':
         get_logger().debug(f'Request body', artifact=body)
-        await handle_push_trigger_for_new_commits(body, event, sender, action, log_context, agent)
+        await handle_push_trigger_for_new_commits(body, event, sender, sender_id, action, log_context, agent)
+    elif event == 'pull_request' and action == 'closed':
+        if get_settings().get("CONFIG.ANALYTICS_FOLDER", ""):
+            handle_closed_pr(body, event, action, log_context)
     else:
         get_logger().info(f"event {event=} action {action=} does not require any handling")
     return {}
 
 
 def handle_line_comments(body: Dict, comment_body: [str, Any]) -> str:

@@ -273,7 +304,7 @@ async def _perform_auto_commands_github(commands_conf: str, agent: PRAgent, body
         other_args = update_settings_from_args(args)
         new_command = ' '.join([command] + other_args)
         with get_logger().contextualize(**log_context):
-            get_logger().info(f"New PR opened. Performing auto command '{new_command}', for {api_url=}")
+            get_logger().info(f"{commands_conf}. Performing auto command '{new_command}', for {api_url=}")
             await agent.handle_request(api_url, new_command)
 
 
@@ -136,7 +136,7 @@ try_fix_invalid_inline_comments = true
 # these toggles allows running the github app from custom deployments
 override_deployment_type = true
 # settings for "pull_request" event
-handle_pr_actions = ['opened', 'reopened', 'ready_for_review', 'review_requested']
+handle_pr_actions = ['opened', 'reopened', 'ready_for_review']
 pr_commands = [
     "/describe --pr_description.add_original_user_description=true --pr_description.keep_original_user_title=true",
     "/review --pr_reviewer.num_code_suggestions=0",
@@ -124,6 +124,12 @@ class PRCodeSuggestions:
             get_logger().error(f"Failed to generate code suggestions for PR, error: {e}")
             if self.progress_response:
                 self.progress_response.delete()
+            else:
+                try:
+                    self.git_provider.remove_initial_comment()
+                    self.git_provider.publish_comment(f"Failed to generate code suggestions for PR")
+                except Exception as e:
+                    pass
 
     async def _prepare_prediction(self, model: str):
         self.patches_diff = get_pr_diff(self.git_provider,
@@ -139,10 +139,6 @@ class PRGenerateLabels:
         system_prompt = environment.from_string(get_settings().pr_custom_labels_prompt.system).render(variables)
         user_prompt = environment.from_string(get_settings().pr_custom_labels_prompt.user).render(variables)
 
-        if get_settings().config.verbosity_level >= 2:
-            get_logger().info(f"\nSystem prompt:\n{system_prompt}")
-            get_logger().info(f"\nUser prompt:\n{user_prompt}")
-
         response, finish_reason = await self.ai_handler.chat_completion(
             model=model,
             temperature=0.2,

@@ -150,9 +146,6 @@ class PRGenerateLabels:
             user=user_prompt
         )
 
-        if get_settings().config.verbosity_level >= 2:
-            get_logger().info(f"\nAI response:\n{response}")
-
         return response
 
     def _prepare_data(self):
@@ -9,6 +9,11 @@ class PRHelpMessage:
 
     async def run(self):
         try:
+            if not self.git_provider.is_supported("gfm_markdown"):
+                self.git_provider.publish_comment(
+                    "The `Help` tool requires gfm markdown, which is not supported by your code platform.")
+                return
+
             get_logger().info('Getting PR Help Message...')
             relevant_configs = {'pr_help': dict(get_settings().pr_help),
                                 'config': dict(get_settings().config)}
@@ -170,7 +170,6 @@ class PRReviewer:
             user=user_prompt
         )
 
-        get_logger().debug(f"\nAI response:\n{response}")
         return response
 
     def _prepare_pr_review(self) -> str:
@@ -9,7 +9,7 @@ from pr_agent.algo.ai_handlers.litellm_ai_handler import LiteLLMAIHandler
 from pr_agent.algo.pr_processing import get_pr_diff, retry_with_fallback_models
 from pr_agent.algo.token_handler import TokenHandler
 from pr_agent.config_loader import get_settings
-from pr_agent.git_providers import get_git_provider
+from pr_agent.git_providers import get_git_provider, GithubProvider
 from pr_agent.git_providers.git_provider import get_main_pr_language
 from pr_agent.log import get_logger
 

@@ -46,12 +46,19 @@ class PRUpdateChangelog:
                                           get_settings().pr_update_changelog_prompt.user)
 
     async def run(self):
-        # assert type(self.git_provider) == GithubProvider, "Currently only Github is supported"
 
         get_logger().info('Updating the changelog...')
         relevant_configs = {'pr_update_changelog': dict(get_settings().pr_update_changelog),
                             'config': dict(get_settings().config)}
         get_logger().debug("Relevant configs", artifacts=relevant_configs)
 
+        # currently only GitHub is supported for pushing changelog changes
+        if get_settings().pr_update_changelog.push_changelog_changes and type(self.git_provider) != GithubProvider:
+            get_logger().error("Pushing changelog changes is not currently supported for this code platform")
+            if get_settings().config.publish_output:
+                self.git_provider.publish_comment(
+                    "Pushing changelog changes is not currently supported for this code platform")
+            return
+
         if get_settings().config.publish_output:
             self.git_provider.publish_comment("Preparing changelog updates...", is_temporary=True)