import copy

from jinja2 import Environment, StrictUndefined

from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler
from pr_agent.algo.ai_handlers.litellm_ai_handler import LiteLLMAIHandler
from pr_agent.algo.pr_processing import get_pr_diff, retry_with_fallback_models
from pr_agent.algo.token_handler import TokenHandler
from pr_agent.config_loader import get_settings
from pr_agent.git_providers import get_git_provider
from pr_agent.git_providers.git_provider import get_main_pr_language
from pr_agent.log import get_logger


class PRQuestions:
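    """Answer a free-text question about a pull request by prompting an AI model
    with the PR metadata and diff, then publishing the answer as a PR comment."""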
    def __init__(self, pr_url: str, args=None, ai_handler: BaseAiHandler = LiteLLMAIHandler()):
        question_str = self.parse_args(args)
        self.git_provider = get_git_provider()(pr_url)
        self.main_pr_language = get_main_pr_language(
            self.git_provider.get_languages(), self.git_provider.get_files()
        )
        self.ai_handler = ai_handler
        self.question_str = question_str
        self.vars = {
            "title": self.git_provider.pr.title,
            "branch": self.git_provider.get_pr_branch(),
            "description": self.git_provider.get_pr_description(),
            "language": self.main_pr_language,
            "diff": "",  # empty diff for initial calculation
            "questions": self.question_str,
            "commit_messages_str": self.git_provider.get_commit_messages(),
        }
        self.token_handler = TokenHandler(self.git_provider.pr,
                                          self.vars,
                                          get_settings().pr_questions_prompt.system,
                                          get_settings().pr_questions_prompt.user)
        self.patches_diff = None
        self.prediction = None

    def parse_args(self, args):
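        # Join any command arguments into a single question string; default to empty.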
        if args and len(args) > 0:
            question_str = " ".join(args)
        else:
            question_str = ""
        return question_str

    async def run(self):
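        # Post a temporary "preparing" comment, query the model (retrying with
        # fallback models on failure), then publish the answer and remove the placeholder.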
        get_logger().info('Answering a PR question...')
        if get_settings().config.publish_output:
            self.git_provider.publish_comment("Preparing answer...", is_temporary=True)
        await retry_with_fallback_models(self._prepare_prediction)
        get_logger().info('Preparing answer...')
        pr_comment = self._prepare_pr_answer()
        if get_settings().config.publish_output:
            get_logger().info('Pushing answer...')
            self.git_provider.publish_comment(pr_comment)
            self.git_provider.remove_initial_comment()
        return ""

    async def _prepare_prediction(self, model: str):
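        # Fetch the PR diff, sized against the model's limits via the token handler,
        # then request the AI prediction.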
        get_logger().info('Getting PR diff...')
        self.patches_diff = get_pr_diff(self.git_provider, self.token_handler, model)
        get_logger().info('Getting AI prediction...')
        self.prediction = await self._get_prediction(model)

    async def _get_prediction(self, model: str):
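        # Render the system/user prompt templates with the PR variables and diff,
        # then call the AI handler and return the raw model response.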
        variables = copy.deepcopy(self.vars)
        variables["diff"] = self.patches_diff  # update diff
        environment = Environment(undefined=StrictUndefined)
        system_prompt = environment.from_string(get_settings().pr_questions_prompt.system).render(variables)
        user_prompt = environment.from_string(get_settings().pr_questions_prompt.user).render(variables)
        if get_settings().config.verbosity_level >= 2:
            get_logger().info(f"\nSystem prompt:\n{system_prompt}")
            get_logger().info(f"\nUser prompt:\n{user_prompt}")
        response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
                                                                        system=system_prompt, user=user_prompt)
        return response

    def _prepare_pr_answer(self) -> str:
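        # Format the question and the model's answer into the comment body.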
        answer_str = f"Question: {self.question_str}\n\n"
        answer_str += f"Answer:\n{self.prediction.strip()}\n\n"
        if get_settings().config.verbosity_level >= 2:
            get_logger().info(f"answer_str:\n{answer_str}")
        return answer_str