mirror of https://github.com/qodo-ai/pr-agent.git (synced 2025-07-14 01:30:37 +08:00)
Refactor logging system to use custom logger across the codebase
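The commit replaces direct use of the standard logging module (and the ad-hoc module-level loggers that two of the providers defined) with a shared get_logger() imported from pr_agent.log. That module itself is not part of this diff; the sketch below is only an illustration, assuming a thin wrapper around the standard library, of the kind of entry point the call sites switch to. The setup_logger helper and the "pr-agent" logger name are assumptions, not code taken from the repository.

import logging
import sys

_LOGGER_NAME = "pr-agent"  # assumed name; the real pr_agent/log.py is not shown in this diff


def setup_logger(level: str = "INFO") -> logging.Logger:
    # Configure the shared logger once, at application startup.
    logger = logging.getLogger(_LOGGER_NAME)
    logger.setLevel(level.upper())
    if not logger.handlers:  # avoid stacking duplicate handlers on repeated calls
        handler = logging.StreamHandler(sys.stdout)
        handler.setFormatter(logging.Formatter("%(asctime)s %(levelname)s %(name)s: %(message)s"))
        logger.addHandler(handler)
    return logger


def get_logger() -> logging.Logger:
    # Return the shared application logger; call sites use get_logger().info(...), .error(...), etc.
    return logging.getLogger(_LOGGER_NAME)

With a module like this in place, each provider only changes its imports and call sites, which is what the hunks below do.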
@@ -1,10 +1,11 @@
import json
-import logging
from typing import Optional, Tuple
from urllib.parse import urlparse

import os

+from ..log import get_logger

AZURE_DEVOPS_AVAILABLE = True
try:
from msrest.authentication import BasicAuthentication
@@ -55,7 +56,7 @@ class AzureDevopsProvider:
path=".pr_agent.toml")
return contents
except Exception as e:
-logging.exception("get repo settings error")
+get_logger().exception("get repo settings error")
return ""

def get_files(self):
@@ -110,7 +111,7 @@ class AzureDevopsProvider:

new_file_content_str = new_file_content_str.content
except Exception as error:
-logging.error("Failed to retrieve new file content of %s at version %s. Error: %s", file, version, str(error))
+get_logger().error("Failed to retrieve new file content of %s at version %s. Error: %s", file, version, str(error))
new_file_content_str = ""

edit_type = EDIT_TYPE.MODIFIED
@@ -131,7 +132,7 @@ class AzureDevopsProvider:
include_content=True)
original_file_content_str = original_file_content_str.content
except Exception as error:
-logging.error("Failed to retrieve original file content of %s at version %s. Error: %s", file, version, str(error))
+get_logger().error("Failed to retrieve original file content of %s at version %s. Error: %s", file, version, str(error))
original_file_content_str = ""

patch = load_large_diff(file, new_file_content_str, original_file_content_str)
@@ -166,7 +167,7 @@ class AzureDevopsProvider:
pull_request_id=self.pr_num,
git_pull_request_to_update=updated_pr)
except Exception as e:
-logging.exception(f"Could not update pull request {self.pr_num} description: {e}")
+get_logger().exception(f"Could not update pull request {self.pr_num} description: {e}")

def remove_initial_comment(self):
return "" # not implemented yet

@@ -1,5 +1,4 @@
import json
-import logging
from typing import Optional, Tuple
from urllib.parse import urlparse

@@ -7,8 +6,9 @@ import requests
from atlassian.bitbucket import Cloud
from starlette_context import context

-from ..algo.pr_processing import clip_tokens, find_line_number_of_relevant_line_in_file
+from ..algo.pr_processing import find_line_number_of_relevant_line_in_file
from ..config_loader import get_settings
+from ..log import get_logger
from .git_provider import FilePatchInfo, GitProvider


@@ -61,14 +61,14 @@ class BitbucketProvider(GitProvider):

if not relevant_lines_start or relevant_lines_start == -1:
if get_settings().config.verbosity_level >= 2:
-logging.exception(
+get_logger().exception(
f"Failed to publish code suggestion, relevant_lines_start is {relevant_lines_start}"
)
continue

if relevant_lines_end < relevant_lines_start:
if get_settings().config.verbosity_level >= 2:
-logging.exception(
+get_logger().exception(
f"Failed to publish code suggestion, "
f"relevant_lines_end is {relevant_lines_end} and "
f"relevant_lines_start is {relevant_lines_start}"
@@ -97,7 +97,7 @@ class BitbucketProvider(GitProvider):
return True
except Exception as e:
if get_settings().config.verbosity_level >= 2:
-logging.error(f"Failed to publish code suggestion, error: {e}")
+get_logger().error(f"Failed to publish code suggestion, error: {e}")
return False

def is_supported(self, capability: str) -> bool:
@@ -144,7 +144,7 @@ class BitbucketProvider(GitProvider):
for comment in self.temp_comments:
self.pr.delete(f"comments/{comment}")
except Exception as e:
-logging.exception(f"Failed to remove temp comments, error: {e}")
+get_logger().exception(f"Failed to remove temp comments, error: {e}")


# funtion to create_inline_comment
@@ -152,7 +152,7 @@ class BitbucketProvider(GitProvider):
position, absolute_position = find_line_number_of_relevant_line_in_file(self.get_diff_files(), relevant_file.strip('`'), relevant_line_in_file)
if position == -1:
if get_settings().config.verbosity_level >= 2:
-logging.info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
+get_logger().info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
subject_type = "FILE"
else:
subject_type = "LINE"

@@ -1,17 +1,16 @@
-import logging
import os
import re
from collections import Counter
from typing import List, Optional, Tuple
from urllib.parse import urlparse

-from ..algo.language_handler import is_valid_file, language_extension_map
-from ..algo.pr_processing import clip_tokens
-from ..algo.utils import load_large_diff
-from ..config_loader import get_settings
-from .git_provider import EDIT_TYPE, FilePatchInfo, GitProvider, IncrementalPR
from pr_agent.git_providers.codecommit_client import CodeCommitClient

+from ..algo.language_handler import is_valid_file, language_extension_map
+from ..algo.utils import load_large_diff
+from .git_provider import EDIT_TYPE, FilePatchInfo, GitProvider
+from ..log import get_logger


class PullRequestCCMimic:
"""
@@ -166,7 +165,7 @@ class CodeCommitProvider(GitProvider):

def publish_comment(self, pr_comment: str, is_temporary: bool = False):
if is_temporary:
-logging.info(pr_comment)
+get_logger().info(pr_comment)
return

pr_comment = CodeCommitProvider._remove_markdown_html(pr_comment)
@@ -188,12 +187,12 @@
for suggestion in code_suggestions:
# Verify that each suggestion has the required keys
if not all(key in suggestion for key in ["body", "relevant_file", "relevant_lines_start"]):
-logging.warning(f"Skipping code suggestion #{counter}: Each suggestion must have 'body', 'relevant_file', 'relevant_lines_start' keys")
+get_logger().warning(f"Skipping code suggestion #{counter}: Each suggestion must have 'body', 'relevant_file', 'relevant_lines_start' keys")
continue

# Publish the code suggestion to CodeCommit
try:
-logging.debug(f"Code Suggestion #{counter} in file: {suggestion['relevant_file']}: {suggestion['relevant_lines_start']}")
+get_logger().debug(f"Code Suggestion #{counter} in file: {suggestion['relevant_file']}: {suggestion['relevant_lines_start']}")
self.codecommit_client.publish_comment(
repo_name=self.repo_name,
pr_number=self.pr_num,
@@ -296,11 +295,11 @@
return self.codecommit_client.get_file(self.repo_name, settings_filename, self.pr.source_commit, optional=True)

def add_eyes_reaction(self, issue_comment_id: int) -> Optional[int]:
-logging.info("CodeCommit provider does not support eyes reaction yet")
+get_logger().info("CodeCommit provider does not support eyes reaction yet")
return True

def remove_reaction(self, issue_comment_id: int, reaction_id: int) -> bool:
-logging.info("CodeCommit provider does not support removing reactions yet")
+get_logger().info("CodeCommit provider does not support removing reactions yet")
return True

@staticmethod
@@ -366,7 +365,7 @@
# TODO: implement support for multiple targets in one CodeCommit PR
# for now, we are only using the first target in the PR
if len(response.targets) > 1:
-logging.warning(
+get_logger().warning(
"Multiple targets in one PR is not supported for CodeCommit yet. Continuing, using the first target only..."
)

@@ -1,5 +1,4 @@
import json
-import logging
import os
import pathlib
import shutil
@@ -7,18 +6,16 @@ import subprocess
import uuid
from collections import Counter, namedtuple
from pathlib import Path
-from tempfile import mkdtemp, NamedTemporaryFile
+from tempfile import NamedTemporaryFile, mkdtemp

import requests
import urllib3.util
from git import Repo

from pr_agent.config_loader import get_settings
-from pr_agent.git_providers.git_provider import GitProvider, FilePatchInfo, \
-EDIT_TYPE
+from pr_agent.git_providers.git_provider import EDIT_TYPE, FilePatchInfo, GitProvider
from pr_agent.git_providers.local_git_provider import PullRequestMimic

-logger = logging.getLogger(__name__)
+from pr_agent.log import get_logger


def _call(*command, **kwargs) -> (int, str, str):
@@ -33,42 +30,42 @@ def _call(*command, **kwargs) -> (int, str, str):


def clone(url, directory):
-logger.info("Cloning %s to %s", url, directory)
+get_logger().info("Cloning %s to %s", url, directory)
stdout = _call('git', 'clone', "--depth", "1", url, directory)
-logger.info(stdout)
+get_logger().info(stdout)


def fetch(url, refspec, cwd):
-logger.info("Fetching %s %s", url, refspec)
+get_logger().info("Fetching %s %s", url, refspec)
stdout = _call(
'git', 'fetch', '--depth', '2', url, refspec,
cwd=cwd
)
-logger.info(stdout)
+get_logger().info(stdout)


def checkout(cwd):
-logger.info("Checking out")
+get_logger().info("Checking out")
stdout = _call('git', 'checkout', "FETCH_HEAD", cwd=cwd)
-logger.info(stdout)
+get_logger().info(stdout)


def show(*args, cwd=None):
-logger.info("Show")
+get_logger().info("Show")
return _call('git', 'show', *args, cwd=cwd)


def diff(*args, cwd=None):
-logger.info("Diff")
+get_logger().info("Diff")
patch = _call('git', 'diff', *args, cwd=cwd)
if not patch:
-logger.warning("No changes found")
+get_logger().warning("No changes found")
return
return patch


def reset_local_changes(cwd):
-logger.info("Reset local changes")
+get_logger().info("Reset local changes")
_call('git', 'checkout', "--force", cwd=cwd)


@@ -1,4 +1,3 @@
-import logging
from abc import ABC, abstractmethod
from dataclasses import dataclass

@@ -6,6 +5,8 @@ from dataclasses import dataclass
from enum import Enum
from typing import Optional

+from pr_agent.log import get_logger


class EDIT_TYPE(Enum):
ADDED = 1
@@ -136,7 +137,7 @@ def get_main_pr_language(languages, files) -> str:
"""
main_language_str = ""
if not languages:
-logging.info("No languages detected")
+get_logger().info("No languages detected")
return main_language_str

try:
@@ -172,7 +173,7 @@ def get_main_pr_language(languages, files) -> str:
main_language_str = top_language

except Exception as e:
-logging.exception(e)
+get_logger().exception(e)
pass

return main_language_str

@@ -1,20 +1,19 @@
-import logging
import hashlib

from datetime import datetime
-from typing import Optional, Tuple, Any
+from typing import Optional, Tuple
from urllib.parse import urlparse

-from github import AppAuthentication, Auth, Github, GithubException, Reaction
+from github import AppAuthentication, Auth, Github, GithubException
from retry import retry
from starlette_context import context

-from .git_provider import FilePatchInfo, GitProvider, IncrementalPR
from ..algo.language_handler import is_valid_file
-from ..algo.pr_processing import clip_tokens, find_line_number_of_relevant_line_in_file
-from ..algo.utils import load_large_diff
+from ..algo.pr_processing import find_line_number_of_relevant_line_in_file, clip_tokens
from ..config_loader import get_settings
+from ..log import get_logger
from ..servers.utils import RateLimitExceeded
+from .git_provider import FilePatchInfo, GitProvider, IncrementalPR


class GithubProvider(GitProvider):
@@ -58,7 +57,7 @@ class GithubProvider(GitProvider):
self.file_set = dict()
for commit in self.incremental.commits_range:
if commit.commit.message.startswith(f"Merge branch '{self._get_repo().default_branch}'"):
-logging.info(f"Skipping merge commit {commit.commit.message}")
+get_logger().info(f"Skipping merge commit {commit.commit.message}")
continue
self.file_set.update({file.filename: file for file in commit.files})

@@ -130,7 +129,7 @@ class GithubProvider(GitProvider):
return diff_files

except GithubException.RateLimitExceededException as e:
-logging.error(f"Rate limit exceeded for GitHub API. Original message: {e}")
+get_logger().error(f"Rate limit exceeded for GitHub API. Original message: {e}")
raise RateLimitExceeded("Rate limit exceeded for GitHub API.") from e

def publish_description(self, pr_title: str, pr_body: str):
@@ -138,7 +137,7 @@ class GithubProvider(GitProvider):

def publish_comment(self, pr_comment: str, is_temporary: bool = False):
if is_temporary and not get_settings().config.publish_output_progress:
-logging.debug(f"Skipping publish_comment for temporary comment: {pr_comment}")
+get_logger().debug(f"Skipping publish_comment for temporary comment: {pr_comment}")
return
response = self.pr.create_issue_comment(pr_comment)
if hasattr(response, "user") and hasattr(response.user, "login"):
@@ -156,7 +155,7 @@ class GithubProvider(GitProvider):
position, absolute_position = find_line_number_of_relevant_line_in_file(self.diff_files, relevant_file.strip('`'), relevant_line_in_file)
if position == -1:
if get_settings().config.verbosity_level >= 2:
-logging.info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
+get_logger().info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
subject_type = "FILE"
else:
subject_type = "LINE"
@@ -179,13 +178,13 @@ class GithubProvider(GitProvider):

if not relevant_lines_start or relevant_lines_start == -1:
if get_settings().config.verbosity_level >= 2:
-logging.exception(
+get_logger().exception(
f"Failed to publish code suggestion, relevant_lines_start is {relevant_lines_start}")
continue

if relevant_lines_end < relevant_lines_start:
if get_settings().config.verbosity_level >= 2:
-logging.exception(f"Failed to publish code suggestion, "
+get_logger().exception(f"Failed to publish code suggestion, "
f"relevant_lines_end is {relevant_lines_end} and "
f"relevant_lines_start is {relevant_lines_start}")
continue
@@ -212,7 +211,7 @@ class GithubProvider(GitProvider):
return True
except Exception as e:
if get_settings().config.verbosity_level >= 2:
-logging.error(f"Failed to publish code suggestion, error: {e}")
+get_logger().error(f"Failed to publish code suggestion, error: {e}")
return False

def remove_initial_comment(self):
@@ -221,7 +220,7 @@ class GithubProvider(GitProvider):
if comment.is_temporary:
comment.delete()
except Exception as e:
-logging.exception(f"Failed to remove initial comment, error: {e}")
+get_logger().exception(f"Failed to remove initial comment, error: {e}")

def get_title(self):
return self.pr.title
@@ -269,7 +268,7 @@ class GithubProvider(GitProvider):
reaction = self.pr.get_issue_comment(issue_comment_id).create_reaction("eyes")
return reaction.id
except Exception as e:
-logging.exception(f"Failed to add eyes reaction, error: {e}")
+get_logger().exception(f"Failed to add eyes reaction, error: {e}")
return None

def remove_reaction(self, issue_comment_id: int, reaction_id: int) -> bool:
@@ -277,7 +276,7 @@ class GithubProvider(GitProvider):
self.pr.get_issue_comment(issue_comment_id).delete_reaction(reaction_id)
return True
except Exception as e:
-logging.exception(f"Failed to remove eyes reaction, error: {e}")
+get_logger().exception(f"Failed to remove eyes reaction, error: {e}")
return False


@@ -396,13 +395,13 @@ class GithubProvider(GitProvider):
"PUT", f"{self.pr.issue_url}/labels", input=post_parameters
)
except Exception as e:
-logging.exception(f"Failed to publish labels, error: {e}")
+get_logger().exception(f"Failed to publish labels, error: {e}")

def get_labels(self):
try:
return [label.name for label in self.pr.labels]
except Exception as e:
-logging.exception(f"Failed to get labels, error: {e}")
+get_logger().exception(f"Failed to get labels, error: {e}")
return []

def get_commit_messages(self):
@@ -444,7 +443,7 @@ class GithubProvider(GitProvider):
return link
except Exception as e:
if get_settings().config.verbosity_level >= 2:
-logging.info(f"Failed adding line link, error: {e}")
+get_logger().info(f"Failed adding line link, error: {e}")

return ""

@@ -1,5 +1,4 @@
import hashlib
-import logging
import re
from typing import Optional, Tuple
from urllib.parse import urlparse
@@ -12,8 +11,8 @@ from ..algo.pr_processing import clip_tokens, find_line_number_of_relevant_line_
from ..algo.utils import load_large_diff
from ..config_loader import get_settings
from .git_provider import EDIT_TYPE, FilePatchInfo, GitProvider
+from ..log import get_logger

-logger = logging.getLogger()

class DiffNotFoundError(Exception):
"""Raised when the diff for a merge request cannot be found."""
@@ -59,7 +58,7 @@ class GitLabProvider(GitProvider):
try:
self.last_diff = self.mr.diffs.list(get_all=True)[-1]
except IndexError as e:
-logger.error(f"Could not get diff for merge request {self.id_mr}")
+get_logger().error(f"Could not get diff for merge request {self.id_mr}")
raise DiffNotFoundError(f"Could not get diff for merge request {self.id_mr}") from e


@@ -99,7 +98,7 @@ class GitLabProvider(GitProvider):
if isinstance(new_file_content_str, bytes):
new_file_content_str = bytes.decode(new_file_content_str, 'utf-8')
except UnicodeDecodeError:
-logging.warning(
+get_logger().warning(
f"Cannot decode file {diff['old_path']} or {diff['new_path']} in merge request {self.id_mr}")

edit_type = EDIT_TYPE.MODIFIED
@@ -135,7 +134,7 @@ class GitLabProvider(GitProvider):
self.mr.description = pr_body
self.mr.save()
except Exception as e:
-logging.exception(f"Could not update merge request {self.id_mr} description: {e}")
+get_logger().exception(f"Could not update merge request {self.id_mr} description: {e}")

def publish_comment(self, mr_comment: str, is_temporary: bool = False):
comment = self.mr.notes.create({'body': mr_comment})
@@ -157,12 +156,12 @@ class GitLabProvider(GitProvider):
def send_inline_comment(self,body: str,edit_type: str,found: bool,relevant_file: str,relevant_line_in_file: int,
source_line_no: int, target_file: str,target_line_no: int) -> None:
if not found:
-logging.info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
+get_logger().info(f"Could not find position for {relevant_file} {relevant_line_in_file}")
else:
# in order to have exact sha's we have to find correct diff for this change
diff = self.get_relevant_diff(relevant_file, relevant_line_in_file)
if diff is None:
-logger.error(f"Could not get diff for merge request {self.id_mr}")
+get_logger().error(f"Could not get diff for merge request {self.id_mr}")
raise DiffNotFoundError(f"Could not get diff for merge request {self.id_mr}")
pos_obj = {'position_type': 'text',
'new_path': target_file.filename,
@@ -175,23 +174,23 @@ class GitLabProvider(GitProvider):
else:
pos_obj['new_line'] = target_line_no - 1
pos_obj['old_line'] = source_line_no - 1
-logging.debug(f"Creating comment in {self.id_mr} with body {body} and position {pos_obj}")
+get_logger().debug(f"Creating comment in {self.id_mr} with body {body} and position {pos_obj}")
self.mr.discussions.create({'body': body, 'position': pos_obj})

def get_relevant_diff(self, relevant_file: str, relevant_line_in_file: int) -> Optional[dict]:
changes = self.mr.changes() # Retrieve the changes for the merge request once
if not changes:
-logging.error('No changes found for the merge request.')
+get_logger().error('No changes found for the merge request.')
return None
all_diffs = self.mr.diffs.list(get_all=True)
if not all_diffs:
-logging.error('No diffs found for the merge request.')
+get_logger().error('No diffs found for the merge request.')
return None
for diff in all_diffs:
for change in changes['changes']:
if change['new_path'] == relevant_file and relevant_line_in_file in change['diff']:
return diff
-logging.debug(
+get_logger().debug(
f'No relevant diff found for {relevant_file} {relevant_line_in_file}. Falling back to last diff.')
return self.last_diff # fallback to last_diff if no relevant diff is found

@@ -226,7 +225,7 @@ class GitLabProvider(GitProvider):
self.send_inline_comment(body, edit_type, found, relevant_file, relevant_line_in_file, source_line_no,
target_file, target_line_no)
except Exception as e:
-logging.exception(f"Could not publish code suggestion:\nsuggestion: {suggestion}\nerror: {e}")
+get_logger().exception(f"Could not publish code suggestion:\nsuggestion: {suggestion}\nerror: {e}")

# note that we publish suggestions one-by-one. so, if one fails, the rest will still be published
return True
@@ -290,7 +289,7 @@ class GitLabProvider(GitProvider):
for comment in self.temp_comments:
comment.delete()
except Exception as e:
-logging.exception(f"Failed to remove temp comments, error: {e}")
+get_logger().exception(f"Failed to remove temp comments, error: {e}")

def get_title(self):
return self.mr.title
@@ -358,7 +357,7 @@ class GitLabProvider(GitProvider):
self.mr.labels = list(set(pr_types))
self.mr.save()
except Exception as e:
-logging.exception(f"Failed to publish labels, error: {e}")
+get_logger().exception(f"Failed to publish labels, error: {e}")

def publish_inline_comments(self, comments: list[dict]):
pass
@@ -410,6 +409,6 @@ class GitLabProvider(GitProvider):
return link
except Exception as e:
if get_settings().config.verbosity_level >= 2:
-logging.info(f"Failed adding line link, error: {e}")
+get_logger().info(f"Failed adding line link, error: {e}")

return ""

@@ -1,4 +1,3 @@
-import logging
from collections import Counter
from pathlib import Path
from typing import List
@@ -7,6 +6,7 @@ from git import Repo

from pr_agent.config_loader import _find_repository_root, get_settings
from pr_agent.git_providers.git_provider import EDIT_TYPE, FilePatchInfo, GitProvider
+from pr_agent.log import get_logger


class PullRequestMimic:
@@ -49,7 +49,7 @@ class LocalGitProvider(GitProvider):
"""
Prepare the repository for PR-mimic generation.
"""
-logging.debug('Preparing repository for PR-mimic generation...')
+get_logger().debug('Preparing repository for PR-mimic generation...')
if self.repo.is_dirty():
raise ValueError('The repository is not in a clean state. Please commit or stash pending changes.')
if self.target_branch_name not in self.repo.heads:

@@ -1,5 +1,4 @@
import copy
-import logging
import os
import tempfile

@@ -7,6 +6,7 @@ from dynaconf import Dynaconf

from pr_agent.config_loader import get_settings
from pr_agent.git_providers import get_git_provider
+from pr_agent.log import get_logger


def apply_repo_settings(pr_url):
@@ -32,4 +32,4 @@ def apply_repo_settings(pr_url):
try:
os.remove(repo_settings_file)
except Exception as e:
-logging.error(f"Failed to remove temporary settings file {repo_settings_file}", e)
+get_logger().error(f"Failed to remove temporary settings file {repo_settings_file}", e)
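Every hunk above applies the same mechanical change at each call site: drop import logging (or the module-level logger = logging.getLogger(...) object) and route the call through the shared accessor instead. A representative before/after pair, modeled on the file-content hunks above; the surrounding helper and its client.get_content() call are illustrative stand-ins, not actual provider code:

from pr_agent.log import get_logger  # the shared accessor this commit standardizes on


def fetch_new_file_content(client, file: str, version: str) -> str:
    # Illustrative helper only: client.get_content() is a stand-in, not a real provider API.
    try:
        return client.get_content(file, version)
    except Exception as error:
        # Before this commit:
        #   logging.error("Failed to retrieve new file content of %s at version %s. Error: %s",
        #                 file, version, str(error))
        # After this commit:
        get_logger().error(
            "Failed to retrieve new file content of %s at version %s. Error: %s",
            file, version, str(error),
        )
        return ""

Note that the migrated calls keep their printf-style argument form; that works as long as get_logger() returns a standard logging.Logger-compatible object, which is an assumption here rather than something this diff shows.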