Merge pull request #1193 from Codium-ai/tr/updates

Tr/updates
Tal authored on 2024-09-01 08:49:16 +03:00 (committed by GitHub)
3 changed files with 41 additions and 23 deletions

@@ -164,6 +164,7 @@ def omit_deletion_hunks(patch_lines) -> str:
                 inside_hunk = True
         else:
             temp_hunk.append(line)
-            edit_type = line[0]
-            if edit_type == '+':
-                add_hunk = True
+            if line:
+                edit_type = line[0]
+                if edit_type == '+':
+                    add_hunk = True

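The new if line: guard protects the classification step from empty patch lines: line[0] on an empty string raises an IndexError, so blank lines are still appended to the hunk but no longer classified. A minimal standalone sketch of the guarded pattern, using a hypothetical patch_lines list for illustration:

    # Sketch only: 'patch_lines' is hypothetical example data, not the real call site.
    patch_lines = ["+added line", "-removed line", "", " context line"]

    add_hunk = False
    for line in patch_lines:
        if line:                    # skip empty lines; line[0] would raise IndexError
            edit_type = line[0]     # '+', '-', or ' '
            if edit_type == '+':
                add_hunk = True
    print(add_hunk)  # True
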
@@ -142,7 +142,6 @@ class BitbucketProvider(GitProvider):
             self.git_files = [_gef_filename(diff) for diff in self.pr.diffstat()]
         return self.git_files
 
-
     def get_diff_files(self) -> list[FilePatchInfo]:
         if self.diff_files:
             return self.diff_files
@@ -164,8 +163,25 @@ class BitbucketProvider(GitProvider):
                 pass
         # get the pr patches
-        pr_patch = self.pr.diff()
-        diff_split = ["diff --git" + x for x in pr_patch.split("diff --git") if x.strip()]
+        try:
+            pr_patches = self.pr.diff()
+        except Exception as e:
+            # Try different encodings if UTF-8 fails
+            get_logger().warning(f"Failed to decode PR patch with utf-8, error: {e}")
+            encodings_to_try = ['iso-8859-1', 'latin-1', 'ascii', 'utf-16']
+            pr_patches = None
+            for encoding in encodings_to_try:
+                try:
+                    pr_patches = self.pr.diff(encoding=encoding)
+                    get_logger().info(f"Successfully decoded PR patch with encoding {encoding}")
+                    break
+                except UnicodeDecodeError:
+                    continue
+
+            if pr_patches is None:
+                raise ValueError(f"Failed to decode PR patch with encodings {encodings_to_try}")
+
+        diff_split = ["diff --git" + x for x in pr_patches.split("diff --git") if x.strip()]
 
         # filter all elements of 'diff_split' that are of indices in 'diffs_original' that are not in 'diffs'
         if len(diff_split) > len(diffs) and len(diffs_original) == len(diff_split):
             diff_split = [diff_split[i] for i in range(len(diff_split)) if diffs_original[i] in diffs]
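
The new try/except implements a decode-with-fallback strategy: use the default (UTF-8) decoding first, and only if that fails retry with a list of alternative encodings before giving up with a ValueError. A minimal standalone sketch of the same idea, written against raw bytes rather than the Bitbucket client's diff() call:

    # Sketch only: decodes raw bytes with a fallback list of encodings,
    # analogous to (but independent of) the provider code above.
    def decode_with_fallback(raw: bytes) -> str:
        encodings_to_try = ['utf-8', 'iso-8859-1', 'ascii', 'utf-16']
        for encoding in encodings_to_try:
            try:
                return raw.decode(encoding)
            except UnicodeDecodeError:
                continue
        raise ValueError(f"Failed to decode patch with encodings {encodings_to_try}")

    print(decode_with_fallback("naïve".encode("iso-8859-1")))  # utf-8 fails, iso-8859-1 succeeds

One caveat for this kind of byte-level fallback list: iso-8859-1 maps every byte value to a character, so it never raises UnicodeDecodeError and any encoding listed after it is effectively unreachable.
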
@@ -196,12 +212,13 @@ class BitbucketProvider(GitProvider):
                     diff_split[i] = ""
                     get_logger().info(f"Disregarding empty diff for file {_gef_filename(diffs[i])}")
                 else:
-                    get_logger().error(f"Error - failed to get diff for file {_gef_filename(diffs[i])}")
+                    get_logger().warning(f"Bitbucket failed to get diff for file {_gef_filename(diffs[i])}")
                     diff_split[i] = ""
 
         invalid_files_names = []
         diff_files = []
         counter_valid = 0
+        # get full files
         for index, diff in enumerate(diffs):
             file_path = _gef_filename(diff)
             if not is_valid_file(file_path):
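
The main behavioral change in this hunk is the log level: a per-file diff that cannot be retrieved is now reported as a warning instead of an error, since the entry is blanked and processing of the remaining files continues. A hedged sketch of that pattern, using Python's standard logging module instead of the project's get_logger() helper:

    import logging

    logging.basicConfig(level=logging.INFO)
    logger = logging.getLogger(__name__)

    # Hypothetical data: one file's diff is missing, but the loop keeps going.
    diff_split = ["diff --git a/file_a.py ...", "", "diff --git a/file_c.py ..."]
    file_names = ["file_a.py", "file_b.py", "file_c.py"]

    for i, name in enumerate(file_names):
        if not diff_split[i]:
            logger.warning("Failed to get diff for file %s", name)  # recoverable, not an error
            diff_split[i] = ""
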
@@ -316,11 +333,13 @@ class BitbucketProvider(GitProvider):
             get_logger().exception(f"Failed to remove comment, error: {e}")
 
     # function to create_inline_comment
-    def create_inline_comment(self, body: str, relevant_file: str, relevant_line_in_file: str, absolute_position: int = None):
+    def create_inline_comment(self, body: str, relevant_file: str, relevant_line_in_file: str,
+                              absolute_position: int = None):
         body = self.limit_output_characters(body, self.max_comment_length)
         position, absolute_position = find_line_number_of_relevant_line_in_file(self.get_diff_files(),
                                                                                 relevant_file.strip('`'),
-                                                                                relevant_line_in_file, absolute_position)
+                                                                                relevant_line_in_file,
+                                                                                absolute_position)
         if position == -1:
             if get_settings().config.verbosity_level >= 2:
                 get_logger().info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
@@ -330,7 +349,6 @@ class BitbucketProvider(GitProvider):
         path = relevant_file.strip()
         return dict(body=body, path=path, position=absolute_position) if subject_type == "LINE" else {}
 
-
     def publish_inline_comment(self, comment: str, from_line: int, file: str, original_suggestion=None):
         comment = self.limit_output_characters(comment, self.max_comment_length)
         payload = json.dumps({
@@ -465,7 +483,6 @@ class BitbucketProvider(GitProvider):
         except Exception:
             return ""
 
-
     def create_or_update_pr_file(self, file_path: str, branch: str, contents="", message="") -> None:
         url = (f"https://api.bitbucket.org/2.0/repositories/{self.workspace_slug}/{self.repo_slug}/src/")
         if not message:

@@ -528,7 +528,7 @@ class PRCodeSuggestions:
         for i, prediction in enumerate(predictions["code_suggestions"]):
             try:
                 if get_settings().pr_code_suggestions.self_reflect_on_suggestions:
-                    score = int(prediction["score"])
+                    score = int(prediction.get("score", 1))
                     if score >= score_threshold:
                         data["code_suggestions"].append(prediction)
                     else:
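
Replacing prediction["score"] with prediction.get("score", 1) makes the scoring loop tolerant of model responses that omit the score field: instead of a KeyError for that suggestion, the code falls back to a default score of 1, which is typically below the configured threshold, so the suggestion is filtered out rather than crashing the iteration. A small standalone sketch with hypothetical prediction data:

    # Hypothetical model output: the second suggestion is missing its "score" field.
    predictions = {"code_suggestions": [{"score": 8, "summary": "fix off-by-one"},
                                        {"summary": "rename variable"}]}
    score_threshold = 7

    kept = []
    for prediction in predictions["code_suggestions"]:
        # prediction["score"] would raise KeyError on the second item;
        # .get("score", 1) falls back to a low default instead.
        score = int(prediction.get("score", 1))
        if score >= score_threshold:
            kept.append(prediction)

    print(kept)  # only the suggestion that actually carries a passing score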