Mirror of https://github.com/qodo-ai/pr-agent.git, synced 2025-07-04 04:40:38 +08:00
@@ -164,9 +164,10 @@ def omit_deletion_hunks(patch_lines) -> str:
                 inside_hunk = True
         else:
             temp_hunk.append(line)
-            edit_type = line[0]
-            if edit_type == '+':
-                add_hunk = True
+            if line:
+                edit_type = line[0]
+                if edit_type == '+':
+                    add_hunk = True
     if inside_hunk and add_hunk:
         added_patched.extend(temp_hunk)

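Note on the hunk above: `line[0]` raises IndexError on an empty patch line, so the new `if line:` guard skips blank lines before reading the edit type. A minimal, self-contained sketch of the guarded classification (illustrative names, not part of the patch):

```python
# Illustrative sketch: classify patch lines the way the guarded loop does,
# skipping empty lines instead of indexing into them.
def classify_patch_lines(patch_lines):
    added = deleted = 0
    for line in patch_lines:
        if line:                      # '' would make line[0] raise IndexError
            edit_type = line[0]
            if edit_type == '+':
                added += 1
            elif edit_type == '-':
                deleted += 1
    return added, deleted

print(classify_patch_lines(['+new line', '', '-old line']))  # -> (1, 1)
```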
@@ -121,7 +121,7 @@ class BitbucketProvider(GitProvider):

     def is_supported(self, capability: str) -> bool:
         if capability in ['get_issue_comments', 'publish_inline_comments', 'get_labels', 'gfm_markdown',
                           'publish_file_comments']:
             return False
         return True

@@ -142,7 +142,6 @@ class BitbucketProvider(GitProvider):
         self.git_files = [_gef_filename(diff) for diff in self.pr.diffstat()]
         return self.git_files

-
     def get_diff_files(self) -> list[FilePatchInfo]:
         if self.diff_files:
             return self.diff_files
@@ -164,8 +163,25 @@ class BitbucketProvider(GitProvider):
                 pass

         # get the pr patches
-        pr_patch = self.pr.diff()
-        diff_split = ["diff --git" + x for x in pr_patch.split("diff --git") if x.strip()]
+        try:
+            pr_patches = self.pr.diff()
+        except Exception as e:
+            # Try different encodings if UTF-8 fails
+            get_logger().warning(f"Failed to decode PR patch with utf-8, error: {e}")
+            encodings_to_try = ['iso-8859-1', 'latin-1', 'ascii', 'utf-16']
+            pr_patches = None
+            for encoding in encodings_to_try:
+                try:
+                    pr_patches = self.pr.diff(encoding=encoding)
+                    get_logger().info(f"Successfully decoded PR patch with encoding {encoding}")
+                    break
+                except UnicodeDecodeError:
+                    continue
+
+            if pr_patches is None:
+                raise ValueError(f"Failed to decode PR patch with encodings {encodings_to_try}")
+
+        diff_split = ["diff --git" + x for x in pr_patches.split("diff --git") if x.strip()]
         # filter all elements of 'diff_split' that are of indices in 'diffs_original' that are not in 'diffs'
         if len(diff_split) > len(diffs) and len(diffs_original) == len(diff_split):
             diff_split = [diff_split[i] for i in range(len(diff_split)) if diffs_original[i] in diffs]
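The hunk above introduces an encoding-fallback retry: decode the PR patch as UTF-8 first, then walk a list of alternative encodings before failing. The same pattern written as a standalone helper over raw bytes (the helper name and the direct bytes input are illustrative, not part of the provider):

```python
# Illustrative helper (not from pr-agent): decode raw bytes by trying UTF-8
# first, then a list of fallback encodings, mirroring the retry loop above.
def decode_with_fallback(raw: bytes, fallbacks=('iso-8859-1', 'latin-1', 'ascii', 'utf-16')) -> str:
    try:
        return raw.decode('utf-8')
    except UnicodeDecodeError:
        pass
    for encoding in fallbacks:
        try:
            return raw.decode(encoding)
        except UnicodeDecodeError:
            continue
    raise ValueError(f"Failed to decode patch with encodings {('utf-8',) + tuple(fallbacks)}")

print(decode_with_fallback('café'.encode('latin-1')))  # UTF-8 fails, iso-8859-1 succeeds
```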
@@ -196,12 +212,13 @@ class BitbucketProvider(GitProvider):
                     diff_split[i] = ""
                     get_logger().info(f"Disregarding empty diff for file {_gef_filename(diffs[i])}")
                 else:
-                    get_logger().error(f"Error - failed to get diff for file {_gef_filename(diffs[i])}")
+                    get_logger().warning(f"Bitbucket failed to get diff for file {_gef_filename(diffs[i])}")
                     diff_split[i] = ""

         invalid_files_names = []
         diff_files = []
         counter_valid = 0
+        # get full files
         for index, diff in enumerate(diffs):
             file_path = _gef_filename(diff)
             if not is_valid_file(file_path):
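Context for the two hunks above: `diff_split` is kept index-aligned with the diffstat entries, so filtering works by index rather than by value. A self-contained illustration of that index-aligned filtering (the string values stand in for real diff objects):

```python
# Illustrative only: keep entries of diff_split whose same-index entry in
# diffs_original is still present in diffs, preserving index alignment.
diffs_original = ['a.py', 'b.py', 'c.py']
diffs = ['a.py', 'c.py']                      # 'b.py' was filtered out earlier
diff_split = ['diff --git a.py ...', 'diff --git b.py ...', 'diff --git c.py ...']

if len(diff_split) > len(diffs) and len(diffs_original) == len(diff_split):
    diff_split = [diff_split[i] for i in range(len(diff_split)) if diffs_original[i] in diffs]

print(diff_split)  # ['diff --git a.py ...', 'diff --git c.py ...']
```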
@@ -316,11 +333,13 @@ class BitbucketProvider(GitProvider):
             get_logger().exception(f"Failed to remove comment, error: {e}")

     # function to create_inline_comment
-    def create_inline_comment(self, body: str, relevant_file: str, relevant_line_in_file: str, absolute_position: int = None):
+    def create_inline_comment(self, body: str, relevant_file: str, relevant_line_in_file: str,
+                              absolute_position: int = None):
         body = self.limit_output_characters(body, self.max_comment_length)
         position, absolute_position = find_line_number_of_relevant_line_in_file(self.get_diff_files(),
                                                                                 relevant_file.strip('`'),
-                                                                                relevant_line_in_file, absolute_position)
+                                                                                relevant_line_in_file,
+                                                                                absolute_position)
         if position == -1:
             if get_settings().config.verbosity_level >= 2:
                 get_logger().info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
@@ -330,10 +349,9 @@ class BitbucketProvider(GitProvider):
         path = relevant_file.strip()
         return dict(body=body, path=path, position=absolute_position) if subject_type == "LINE" else {}

-
     def publish_inline_comment(self, comment: str, from_line: int, file: str, original_suggestion=None):
         comment = self.limit_output_characters(comment, self.max_comment_length)
-        payload = json.dumps( {
+        payload = json.dumps({
             "content": {
                 "raw": comment,
             },
@@ -378,10 +396,10 @@ class BitbucketProvider(GitProvider):
             for comment in comments:
                 if 'position' in comment:
                     self.publish_inline_comment(comment['body'], comment['position'], comment['path'])
                 elif 'start_line' in comment: # multi-line comment
                     # note that bitbucket does not seem to support range - only a comment on a single line - https://community.developer.atlassian.com/t/api-post-endpoint-for-inline-pull-request-comments/60452
                     self.publish_inline_comment(comment['body'], comment['start_line'], comment['path'])
                 elif 'line' in comment: # single-line comment
                     self.publish_inline_comment(comment['body'], comment['line'], comment['path'])
                 else:
                     get_logger().error(f"Could not publish inline comment {comment}")
@@ -465,7 +483,6 @@ class BitbucketProvider(GitProvider):
         except Exception:
             return ""

-
     def create_or_update_pr_file(self, file_path: str, branch: str, contents="", message="") -> None:
         url = (f"https://api.bitbucket.org/2.0/repositories/{self.workspace_slug}/{self.repo_slug}/src/")
         if not message:
@@ -473,12 +490,12 @@ class BitbucketProvider(GitProvider):
                 message = f"Update {file_path}"
             else:
                 message = f"Create {file_path}"
-        files={file_path: contents}
-        data={
+        files = {file_path: contents}
+        data = {
             "message": message,
             "branch": branch
         }
-        headers = {'Authorization':self.headers['Authorization']} if 'Authorization' in self.headers else {}
+        headers = {'Authorization': self.headers['Authorization']} if 'Authorization' in self.headers else {}
         try:
             requests.request("POST", url, headers=headers, data=data, files=files)
         except Exception:
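For reference, the upload in the hunk above is a plain multipart POST to the Bitbucket Cloud `src` endpoint, with the commit message and branch as form fields and the file contents as the uploaded part. A standalone sketch with placeholder workspace, repo, branch, path, and token values (all illustrative):

```python
# Standalone sketch of the same upload call; every value below is a placeholder.
import requests

workspace_slug, repo_slug = "my-workspace", "my-repo"
url = f"https://api.bitbucket.org/2.0/repositories/{workspace_slug}/{repo_slug}/src/"
files = {"docs/README.md": "hello world\n"}               # path -> new contents
data = {"message": "Update docs/README.md", "branch": "feature/docs"}
headers = {"Authorization": "Bearer <access-token>"}      # placeholder credential

response = requests.request("POST", url, headers=headers, data=data, files=files)
print(response.status_code)
```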
@@ -503,7 +520,7 @@ class BitbucketProvider(GitProvider):
             "description": description,
             "title": pr_title

         })

         response = requests.request("PUT", self.bitbucket_pull_request_api_url, headers=self.headers, data=payload)
         try:
@@ -104,8 +104,8 @@ class PRCodeSuggestions:
             if not data:
                 data = {"code_suggestions": []}

             if (data is None or 'code_suggestions' not in data or not data['code_suggestions']
                     and get_settings().config.publish_output):
                 get_logger().warning('No code suggestions found for the PR.')
                 pr_body = "## PR Code Suggestions ✨\n\nNo code suggestions found for the PR."
                 get_logger().debug(f"PR output", artifact=pr_body)
@@ -528,7 +528,7 @@ class PRCodeSuggestions:
         for i, prediction in enumerate(predictions["code_suggestions"]):
             try:
                 if get_settings().pr_code_suggestions.self_reflect_on_suggestions:
-                    score = int(prediction["score"])
+                    score = int(prediction.get("score", 1))
                     if score >= score_threshold:
                         data["code_suggestions"].append(prediction)
                     else:
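The change in the last hunk makes the score lookup defensive: a suggestion that arrives without a "score" field now defaults to 1 (and is typically filtered out) instead of raising KeyError. A small self-contained sketch of the effect (names and threshold are illustrative):

```python
# Illustrative sketch of the defensive score filter introduced above.
score_threshold = 7
predictions = {"code_suggestions": [
    {"summary": "use a set for membership tests", "score": 9},
    {"summary": "rename variable"},                 # no "score" field at all
]}

data = {"code_suggestions": []}
for prediction in predictions["code_suggestions"]:
    score = int(prediction.get("score", 1))         # was: int(prediction["score"])
    if score >= score_threshold:
        data["code_suggestions"].append(prediction)

print(len(data["code_suggestions"]))  # 1 -> only the explicitly scored suggestion survives
```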