Mirror of https://github.com/qodo-ai/pr-agent.git (synced 2025-07-05 13:20:39 +08:00)

@@ -11,6 +11,7 @@ from enum import Enum
 from typing import Any, List, Tuple
 
 import yaml
+from pydantic import BaseModel
 from starlette_context import context
 
 from pr_agent.algo import MAX_TOKENS
@@ -19,6 +20,12 @@ from pr_agent.config_loader import get_settings, global_settings
 from pr_agent.algo.types import FilePatchInfo
 from pr_agent.log import get_logger
 
+class Range(BaseModel):
+    line_start: int # should be 0-indexed
+    line_end: int
+    column_start: int = -1
+    column_end: int = -1
+
 class ModelType(str, Enum):
     REGULAR = "regular"
     TURBO = "turbo"
@@ -130,6 +130,18 @@ class AzureDevopsProvider(GitProvider):
     def get_pr_description_full(self) -> str:
         return self.pr.description
 
+    def delete_comment(self, comment):
+        try:
+            self.azure_devops_client.delete_comment(
+                repository_id=self.repo_slug,
+                pull_request_id=self.pr_num,
+                thread_id=comment.thread_id,
+                comment_id=comment.id,
+                project=self.workspace_slug,
+            )
+        except Exception as e:
+            get_logger().exception(f"Failed to delete comment, error: {e}")
+
     def edit_comment(self, comment, body: str):
         try:
             self.azure_devops_client.update_comment(
@@ -525,10 +537,18 @@ class AzureDevopsProvider(GitProvider):
     def get_user_id(self):
         return 0
 
 
     def get_issue_comments(self):
-        raise NotImplementedError(
-            "Azure DevOps provider does not support issue comments yet"
-        )
+        threads = self.azure_devops_client.get_threads(repository_id=self.repo_slug, pull_request_id=self.pr_num, project=self.workspace_slug)
+        threads.reverse()
+        comment_list = []
+        for thread in threads:
+            for comment in thread.comments:
+                if comment.content and comment not in comment_list:
+                    comment.body = comment.content
+                    comment.thread_id = thread.id
+                    comment_list.append(comment)
+        return comment_list
+
     def add_eyes_reaction(self, issue_comment_id: int, disable_eyes: bool = False) -> Optional[int]:
         return True
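
A sketch of how a caller might consume the reworked Azure DevOps get_issue_comments(); `provider` is assumed to be an already-initialized AzureDevopsProvider bound to a PR:

# Hypothetical caller; each returned comment carries .body (mirroring .content) and the
# .thread_id attached by the provider loop above.
for comment in provider.get_issue_comments():
    print(comment.thread_id, comment.body)
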
@@ -3,10 +3,11 @@ from abc import ABC, abstractmethod
 # enum EDIT_TYPE (ADDED, DELETED, MODIFIED, RENAMED)
 from typing import Optional
 
+from pr_agent.algo.utils import Range
 from pr_agent.config_loader import get_settings
 from pr_agent.algo.types import FilePatchInfo
 from pr_agent.log import get_logger
 
+MAX_FILES_ALLOWED_FULL = 50
 
 class GitProvider(ABC):
     @abstractmethod
@@ -51,6 +52,12 @@ class GitProvider(ABC):
     def edit_comment(self, comment, body: str):
         pass
 
+    def edit_comment_from_comment_id(self, comment_id: int, body: str):
+        pass
+
+    def get_comment_body_from_comment_id(self, comment_id: int) -> str:
+        pass
+
     def reply_to_comment_from_comment_id(self, comment_id: int, body: str):
         pass
 
@@ -74,6 +81,7 @@ class GitProvider(ABC):
         # if the existing description wasn't generated by the pr-agent, just return it as-is
         if not self._is_generated_by_pr_agent(description_lowercase):
             get_logger().info(f"Existing description was not generated by the pr-agent")
+            self.user_description = description
             return description
 
         # if the existing description was generated by the pr-agent, but it doesn't contain a user description,
@@ -120,12 +128,18 @@ class GitProvider(ABC):
     def get_repo_settings(self):
         pass
 
+    def get_workspace_name(self):
+        return ""
+
     def get_pr_id(self):
         return ""
 
     def get_line_link(self, relevant_file: str, relevant_line_start: int, relevant_line_end: int = None) -> str:
         return ""
 
+    def get_lines_link_original_file(self, filepath:str, component_range: Range) -> str:
+        return ""
+
     #### comments operations ####
     @abstractmethod
     def publish_comment(self, pr_comment: str, is_temporary: bool = False):
@@ -166,6 +180,7 @@ class GitProvider(ABC):
                 pass
         self.publish_comment(pr_comment)
 
+
     @abstractmethod
     def publish_inline_comment(self, body: str, relevant_file: str, relevant_line_in_file: str):
         pass
@@ -193,6 +208,9 @@ class GitProvider(ABC):
     def get_comment_url(self, comment) -> str:
         return ""
 
+    def delete_comment(self, comment):
+        comment.delete()
+
     #### labels operations ####
     @abstractmethod
     def publish_labels(self, labels):
@@ -1,3 +1,4 @@
+import itertools
 import time
 import hashlib
 from datetime import datetime
@@ -10,11 +11,11 @@ from starlette_context import context
 
 from ..algo.file_filter import filter_ignored
 from ..algo.language_handler import is_valid_file
-from ..algo.utils import PRReviewHeader, load_large_diff, clip_tokens, find_line_number_of_relevant_line_in_file
+from ..algo.utils import PRReviewHeader, load_large_diff, clip_tokens, find_line_number_of_relevant_line_in_file, Range
 from ..config_loader import get_settings
 from ..log import get_logger
 from ..servers.utils import RateLimitExceeded
-from .git_provider import GitProvider, IncrementalPR
+from .git_provider import GitProvider, IncrementalPR, MAX_FILES_ALLOWED_FULL
 from pr_agent.algo.types import EDIT_TYPE, FilePatchInfo
 
 
@@ -164,18 +165,34 @@ class GithubProvider(GitProvider):
 
             diff_files = []
             invalid_files_names = []
+            counter_valid = 0
             for file in files:
                 if not is_valid_file(file.filename):
                     invalid_files_names.append(file.filename)
                     continue
 
-                new_file_content_str = self._get_pr_file_content(file, self.pr.head.sha) # communication with GitHub
                 patch = file.patch
 
+                # allow only a limited number of files to be fully loaded. We can manage the rest with diffs only
+                counter_valid += 1
+                avoid_load = False
+                if counter_valid >= MAX_FILES_ALLOWED_FULL and patch and not self.incremental.is_incremental:
+                    avoid_load = True
+                    if counter_valid == MAX_FILES_ALLOWED_FULL:
+                        get_logger().info(f"Too many files in PR, will avoid loading full content for rest of files")
+
+                if avoid_load:
+                    new_file_content_str = ""
+                else:
+                    new_file_content_str = self._get_pr_file_content(file, self.pr.head.sha) # communication with GitHub
+
                 if self.incremental.is_incremental and self.unreviewed_files_set:
                     original_file_content_str = self._get_pr_file_content(file, self.incremental.last_seen_commit_sha)
                     patch = load_large_diff(file.filename, new_file_content_str, original_file_content_str)
                     self.unreviewed_files_set[file.filename] = patch
+                else:
+                    if avoid_load:
+                        original_file_content_str = ""
                 else:
                     original_file_content_str = self._get_pr_file_content(file, self.pr.base.sha)
                 if not patch:
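
A simplified, standalone sketch of the full-content cap introduced above; it omits the patch and incremental-review conditions of the real code, and fetch_full_content is a hypothetical stand-in for the provider's content call:

MAX_FILES_ALLOWED_FULL = 50  # same constant as the one added to the GitProvider module

def collect_file_contents(valid_files, fetch_full_content):
    contents = {}
    for counter_valid, filename in enumerate(valid_files, start=1):
        if counter_valid >= MAX_FILES_ALLOWED_FULL:
            contents[filename] = ""  # past the cap: manage this file with its diff only
        else:
            contents[filename] = fetch_full_content(filename)
    return contents
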
@@ -427,6 +444,16 @@ class GithubProvider(GitProvider):
     def edit_comment(self, comment, body: str):
         comment.edit(body=body)
 
+    def edit_comment_from_comment_id(self, comment_id: int, body: str):
+        try:
+            # self.pr.get_issue_comment(comment_id).edit(body)
+            headers, data_patch = self.pr._requester.requestJsonAndCheck(
+                "PATCH", f"{self.base_url}/repos/{self.repo}/issues/comments/{comment_id}",
+                input={"body": body}
+            )
+        except Exception as e:
+            get_logger().exception(f"Failed to edit comment, error: {e}")
+
     def reply_to_comment_from_comment_id(self, comment_id: int, body: str):
         try:
             # self.pr.get_issue_comment(comment_id).edit(body)
@@ -437,6 +464,50 @@ class GithubProvider(GitProvider):
         except Exception as e:
             get_logger().exception(f"Failed to reply comment, error: {e}")
 
+    def get_comment_body_from_comment_id(self, comment_id: int):
+        try:
+            # self.pr.get_issue_comment(comment_id).edit(body)
+            headers, data_patch = self.pr._requester.requestJsonAndCheck(
+                "GET", f"{self.base_url}/repos/{self.repo}/issues/comments/{comment_id}"
+            )
+            return data_patch.get("body","")
+        except Exception as e:
+            get_logger().exception(f"Failed to edit comment, error: {e}")
+            return None
+
+    def publish_file_comments(self, file_comments: list) -> bool:
+        try:
+            headers, existing_comments = self.pr._requester.requestJsonAndCheck(
+                "GET", f"{self.pr.url}/comments"
+            )
+            for comment in file_comments:
+                comment['commit_id'] = self.last_commit_id.sha
+
+                found = False
+                for existing_comment in existing_comments:
+                    comment['commit_id'] = self.last_commit_id.sha
+                    our_app_name = get_settings().get("GITHUB.APP_NAME", "")
+                    same_comment_creator = False
+                    if self.deployment_type == 'app':
+                        same_comment_creator = our_app_name.lower() in existing_comment['user']['login'].lower()
+                    elif self.deployment_type == 'user':
+                        same_comment_creator = self.github_user_id == existing_comment['user']['login']
+                    if existing_comment['subject_type'] == 'file' and comment['path'] == existing_comment['path'] and same_comment_creator:
+                        headers, data_patch = self.pr._requester.requestJsonAndCheck(
+                            "PATCH", f"{self.base_url}/repos/{self.repo}/pulls/comments/{existing_comment['id']}", input={"body":comment['body']}
+                        )
+                        found = True
+                        break
+                if not found:
+                    headers, data_post = self.pr._requester.requestJsonAndCheck(
+                        "POST", f"{self.pr.url}/comments", input=comment
+                    )
+            return True
+        except Exception as e:
+            if get_settings().config.verbosity_level >= 2:
+                get_logger().error(f"Failed to publish diffview file summary, error: {e}")
+            return False
+
     def remove_initial_comment(self):
         try:
             for comment in getattr(self.pr, 'comments_list', []):
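
An illustrative call shape for the new publish_file_comments(); `github_provider` is assumed to be an initialized GithubProvider, the path and body are made up, and the field names mirror what the method reads and forwards to GitHub's pull-request review-comments endpoint (it injects commit_id itself):

file_comments = [
    {
        "path": "pr_agent/algo/utils.py",  # file the comment is attached to
        "subject_type": "file",            # file-level rather than line-level comment
        "body": "**File walkthrough:** adds the Range model used by code components.",
    },
]
ok = github_provider.publish_file_comments(file_comments)  # True on success, False on any API error
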
@@ -461,6 +532,11 @@ class GithubProvider(GitProvider):
     def get_pr_branch(self):
         return self.pr.head.ref
 
+    def get_pr_owner_id(self) -> str | None:
+        if not self.repo:
+            return None
+        return self.repo.split('/')[0]
+
     def get_pr_description_full(self):
         return self.pr.body
 
@@ -495,6 +571,9 @@ class GithubProvider(GitProvider):
         except Exception:
             return ""
 
+    def get_workspace_name(self):
+        return self.repo.split('/')[0]
+
     def add_eyes_reaction(self, issue_comment_id: int, disable_eyes: bool = False) -> Optional[int]:
         if disable_eyes:
             return None
@@ -673,7 +752,7 @@ class GithubProvider(GitProvider):
 
     def get_repo_labels(self):
         labels = self.repo_obj.get_labels()
-        return [label for label in labels]
+        return [label for label in itertools.islice(labels, 50)]
 
     def get_commit_messages(self):
         """
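
The get_repo_labels() change above caps label listing at 50 entries; a tiny self-contained illustration of the itertools.islice behavior (plain Python, independent of PyGithub):

import itertools

labels = (f"label-{i}" for i in range(200))  # stands in for repo_obj.get_labels()
capped = [label for label in itertools.islice(labels, 50)]
assert len(capped) == 50 and capped[0] == "label-0"
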
@@ -728,6 +807,29 @@ class GithubProvider(GitProvider):
         link = f"{self.base_url_html}/{self.repo}/pull/{self.pr_num}/files#diff-{sha_file}R{relevant_line_start}"
         return link
 
+    def get_lines_link_original_file(self, filepath: str, component_range: Range) -> str:
+        """
+        Returns the link to the original file on GitHub that corresponds to the given filepath and component range.
+
+        Args:
+            filepath (str): The path of the file.
+            component_range (Range): The range of lines that represent the component.
+
+        Returns:
+            str: The link to the original file on GitHub.
+
+        Example:
+            >>> filepath = "path/to/file.py"
+            >>> component_range = Range(line_start=10, line_end=20)
+            >>> link = get_lines_link_original_file(filepath, component_range)
+            >>> print(link)
+            "https://github.com/{repo}/blob/{commit_sha}/{filepath}/#L11-L21"
+        """
+        line_start = component_range.line_start + 1
+        line_end = component_range.line_end + 1
+        link = (f"https://github.com/{self.repo}/blob/{self.last_commit_id.sha}/{filepath}/"
+                f"#L{line_start}-L{line_end}")
+        return link
 
     def get_pr_id(self):
         try:
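
The 0-indexed Range is shifted to GitHub's 1-indexed #Lstart-Lend anchor, exactly as the docstring example above shows; a minimal reproduction of just that conversion (values illustrative):

line_start, line_end = 10, 20  # Range(line_start=10, line_end=20), 0-indexed
anchor = f"#L{line_start + 1}-L{line_end + 1}"
print(anchor)  # -> #L11-L21
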
@@ -4,13 +4,14 @@ from typing import Optional, Tuple
 from urllib.parse import urlparse
 
 import gitlab
+import requests
 from gitlab import GitlabGetError
 
 from ..algo.file_filter import filter_ignored
 from ..algo.language_handler import is_valid_file
 from ..algo.utils import load_large_diff, clip_tokens, find_line_number_of_relevant_line_in_file
 from ..config_loader import get_settings
-from .git_provider import GitProvider
+from .git_provider import GitProvider, MAX_FILES_ALLOWED_FULL
 from pr_agent.algo.types import EDIT_TYPE, FilePatchInfo
 from ..log import get_logger
 
@@ -45,7 +46,8 @@ class GitLabProvider(GitProvider):
         self.incremental = incremental
 
     def is_supported(self, capability: str) -> bool:
-        if capability in ['get_issue_comments', 'create_inline_comment', 'publish_inline_comments']: # gfm_markdown is supported in gitlab !
+        if capability in ['get_issue_comments', 'create_inline_comment', 'publish_inline_comments',
+                          'publish_file_comments']: # gfm_markdown is supported in gitlab !
             return False
         return True
 
@@ -101,13 +103,23 @@ class GitLabProvider(GitProvider):
 
         diff_files = []
         invalid_files_names = []
+        counter_valid = 0
         for diff in diffs:
             if not is_valid_file(diff['new_path']):
                 invalid_files_names.append(diff['new_path'])
                 continue
 
+            # allow only a limited number of files to be fully loaded. We can manage the rest with diffs only
+            counter_valid += 1
+            if counter_valid < MAX_FILES_ALLOWED_FULL or not diff['diff']:
                 original_file_content_str = self.get_pr_file_content(diff['old_path'], self.mr.diff_refs['base_sha'])
                 new_file_content_str = self.get_pr_file_content(diff['new_path'], self.mr.diff_refs['head_sha'])
+            else:
+                if counter_valid == MAX_FILES_ALLOWED_FULL:
+                    get_logger().info(f"Too many files in PR, will avoid loading full content for rest of files")
+                original_file_content_str = ''
+                new_file_content_str = ''
+
             try:
                 if isinstance(original_file_content_str, bytes):
                     original_file_content_str = bytes.decode(original_file_content_str, 'utf-8')
@@ -206,11 +218,11 @@ class GitLabProvider(GitProvider):
         raise NotImplementedError("Gitlab provider does not support publishing inline comments yet")
 
     def get_comment_body_from_comment_id(self, comment_id: int):
-        comment = self.mr.notes.get(comment_id)
+        comment = self.mr.notes.get(comment_id).body
         return comment
 
     def send_inline_comment(self,body: str,edit_type: str,found: bool,relevant_file: str,relevant_line_in_file: int,
-                            source_line_no: int, target_file: str,target_line_no: int) -> None:
+                            source_line_no: int, target_file: str,target_line_no: int, original_suggestion) -> None:
         if not found:
             get_logger().info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
         else:
@@ -230,12 +242,46 @@ class GitLabProvider(GitProvider):
             else:
                 pos_obj['new_line'] = target_line_no - 1
                 pos_obj['old_line'] = source_line_no - 1
-            get_logger().debug(f"Creating comment in {self.id_mr} with body {body} and position {pos_obj}")
+            get_logger().debug(f"Creating comment in MR {self.id_mr} with body {body} and position {pos_obj}")
             try:
                 self.mr.discussions.create({'body': body, 'position': pos_obj})
             except Exception as e:
-                get_logger().debug(
-                    f"Failed to create comment in {self.id_mr} with position {pos_obj} (probably not a '+' line)")
+                try:
+                    # fallback - create a general note on the file in the MR
+                    line_start = original_suggestion['suggestion_orig_location']['start_line']
+                    line_end = original_suggestion['suggestion_orig_location']['end_line']
+                    old_code_snippet = original_suggestion['prev_code_snippet']
+                    new_code_snippet = original_suggestion['new_code_snippet']
+                    content = original_suggestion['suggestion_summary']
+                    label = original_suggestion['category']
+                    score = original_suggestion['score']
+
+                    if hasattr(self, 'main_language'):
+                        language = self.main_language
+                    else:
+                        language = ''
+                    link = self.get_line_link(relevant_file, line_start, line_end)
+                    body_fallback =f"**Suggestion:** {content} [{label}, importance: {score}]\n___\n"
+                    body_fallback +=f"\n\nReplace lines ([{line_start}-{line_end}]({link}))\n\n```{language}\n{old_code_snippet}\n````\n\n"
+                    body_fallback +=f"with\n\n```{language}\n{new_code_snippet}\n````"
+                    body_fallback += f"\n\n___\n\n`(Cannot implement this suggestion directly, as gitlab API does not enable committing to a non -+ line in a PR)`"
+
+                    # Create a general note on the file in the MR
+                    self.mr.notes.create({
+                        'body': body_fallback,
+                        'position': {
+                            'base_sha': diff.base_commit_sha,
+                            'start_sha': diff.start_commit_sha,
+                            'head_sha': diff.head_commit_sha,
+                            'position_type': 'text',
+                            'file_path': f'{target_file.filename}',
+                        }
+                    })
+
+                    # get_logger().debug(
+                    #     f"Failed to create comment in MR {self.id_mr} with position {pos_obj} (probably not a '+' line)")
+                except Exception as e:
+                    get_logger().exception(f"Failed to create comment in MR {self.id_mr} with position {pos_obj}: {e}")
 
     def get_relevant_diff(self, relevant_file: str, relevant_line_in_file: int) -> Optional[dict]:
         changes = self.mr.changes() # Retrieve the changes for the merge request once
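
The GitLab fallback above builds its note from an original_suggestion dict; a hedged sketch of that structure, with field names taken from the code and values purely illustrative:

original_suggestion = {
    "suggestion_orig_location": {"start_line": 12, "end_line": 14},
    "prev_code_snippet": "result = compute(x)\nreturn result",
    "new_code_snippet": "return compute(x)",
    "suggestion_summary": "Return the computed value directly",
    "category": "best practice",
    "score": 7,
}
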
@@ -257,6 +303,7 @@ class GitLabProvider(GitProvider):
     def publish_code_suggestions(self, code_suggestions: list) -> bool:
         for suggestion in code_suggestions:
             try:
+                original_suggestion = suggestion['original_suggestion']
                 body = suggestion['body']
                 relevant_file = suggestion['relevant_file']
                 relevant_lines_start = suggestion['relevant_lines_start']
@@ -283,13 +330,16 @@ class GitLabProvider(GitProvider):
                     edit_type = 'addition'
 
                 self.send_inline_comment(body, edit_type, found, relevant_file, relevant_line_in_file, source_line_no,
-                                         target_file, target_line_no)
+                                         target_file, target_line_no, original_suggestion)
             except Exception as e:
                 get_logger().exception(f"Could not publish code suggestion:\nsuggestion: {suggestion}\nerror: {e}")
 
         # note that we publish suggestions one-by-one. so, if one fails, the rest will still be published
         return True
 
+    def publish_file_comments(self, file_comments: list) -> bool:
+        pass
+
     def search_line(self, relevant_file, relevant_line_in_file):
         target_file = None
 
@@ -380,6 +430,9 @@ class GitLabProvider(GitProvider):
         except Exception:
             return ""
 
+    def get_workspace_name(self):
+        return self.id_project.split('/')[0]
+
     def add_eyes_reaction(self, issue_comment_id: int, disable_eyes: bool = False) -> Optional[int]:
         return True
 
@@ -11,7 +11,8 @@ from pr_agent.algo.pr_processing import get_pr_diff, get_pr_multi_diffs, retry_w
 from pr_agent.algo.token_handler import TokenHandler
 from pr_agent.algo.utils import load_yaml, replace_code_tags, ModelType, show_relevant_configurations
 from pr_agent.config_loader import get_settings
-from pr_agent.git_providers import get_git_provider, get_git_provider_with_context, GithubProvider, GitLabProvider
+from pr_agent.git_providers import get_git_provider, get_git_provider_with_context, GithubProvider, GitLabProvider, \
+    AzureDevopsProvider
 from pr_agent.git_providers.git_provider import get_main_pr_language
 from pr_agent.log import get_logger
 from pr_agent.servers.help import HelpMessage
@@ -176,6 +177,13 @@ class PRCodeSuggestions:
                                                 final_update_message=True,
                                                 max_previous_comments=4,
                                                 progress_response=None):
+        if isinstance(self.git_provider, AzureDevopsProvider): # get_latest_commit_url is not supported yet
+            if progress_response:
+                self.git_provider.edit_comment(progress_response, pr_comment)
+            else:
+                self.git_provider.publish_comment(pr_comment)
+            return
+
         history_header = f"#### Previous suggestions\n"
         last_commit_num = self.git_provider.get_latest_commit_url().split('/')[-1][:7]
         latest_suggestion_header = f"Latest suggestions up to {last_commit_num}"
@@ -248,7 +256,7 @@ class PRCodeSuggestions:
                 get_logger().info(f"Persistent mode - updating comment {comment_url} to latest {name} message")
                 if progress_response: # publish to 'progress_response' comment, because it refreshes immediately
                     self.git_provider.edit_comment(progress_response, pr_comment_updated)
-                    comment.delete()
+                    self.git_provider.delete_comment(comment)
                 else:
                     self.git_provider.edit_comment(comment, pr_comment_updated)
                 return
@@ -424,7 +432,8 @@ class PRCodeSuggestions:
                     body = f"**Suggestion:** {content} [{label}]\n```suggestion\n" + new_code_snippet + "\n```"
                     code_suggestions.append({'body': body, 'relevant_file': relevant_file,
                                              'relevant_lines_start': relevant_lines_start,
-                                             'relevant_lines_end': relevant_lines_end})
+                                             'relevant_lines_end': relevant_lines_end,
+                                             'original_suggestion': d})
                 except Exception:
                     get_logger().info(f"Could not parse suggestion: {d}")