Add end-to-end tests for GitHub, GitLab, and Bitbucket apps; update temperature setting usage across tools

This commit is contained in:
mrT23
2024-07-27 17:19:32 +03:00
parent 3a77652660
commit ac247dbc2c
15 changed files with 352 additions and 18 deletions

View File

@@ -89,8 +89,8 @@ class PRAddDocs:
if get_settings().config.verbosity_level >= 2:
get_logger().info(f"\nSystem prompt:\n{system_prompt}")
get_logger().info(f"\nUser prompt:\n{user_prompt}")
response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
system=system_prompt, user=user_prompt)
response, finish_reason = await self.ai_handler.chat_completion(
model=model, temperature=get_settings().config.temperature, system=system_prompt, user=user_prompt)
return response

View File

@@ -304,8 +304,8 @@ class PRCodeSuggestions:
environment = Environment(undefined=StrictUndefined)
system_prompt = environment.from_string(self.pr_code_suggestions_prompt_system).render(variables)
user_prompt = environment.from_string(get_settings().pr_code_suggestions_prompt.user).render(variables)
response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
system=system_prompt, user=user_prompt)
response, finish_reason = await self.ai_handler.chat_completion(
model=model, temperature=get_settings().config.temperature, system=system_prompt, user=user_prompt)
# load suggestions from the AI response
data = self._prepare_pr_code_suggestions(response)

View File

@@ -325,7 +325,7 @@ class PRDescription:
response, finish_reason = await self.ai_handler.chat_completion(
model=model,
temperature=0.2,
temperature=get_settings().config.temperature,
system=system_prompt,
user=user_prompt
)

View File

@@ -142,7 +142,7 @@ class PRGenerateLabels:
response, finish_reason = await self.ai_handler.chat_completion(
model=model,
temperature=0.2,
temperature=get_settings().config.temperature,
system=system_prompt,
user=user_prompt
)

View File

@@ -66,8 +66,8 @@ class PRInformationFromUser:
if get_settings().config.verbosity_level >= 2:
get_logger().info(f"\nSystem prompt:\n{system_prompt}")
get_logger().info(f"\nUser prompt:\n{user_prompt}")
response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
system=system_prompt, user=user_prompt)
response, finish_reason = await self.ai_handler.chat_completion(
model=model, temperature=get_settings().config.temperature, system=system_prompt, user=user_prompt)
return response
def _prepare_pr_answer(self) -> str:

View File

@@ -102,6 +102,6 @@ class PR_LineQuestions:
print(f"\nSystem prompt:\n{system_prompt}")
print(f"\nUser prompt:\n{user_prompt}")
response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
system=system_prompt, user=user_prompt)
response, finish_reason = await self.ai_handler.chat_completion(
model=model, temperature=get_settings().config.temperature, system=system_prompt, user=user_prompt)
return response

View File

@@ -108,12 +108,12 @@ class PRQuestions:
user_prompt = environment.from_string(get_settings().pr_questions_prompt.user).render(variables)
if 'img_path' in variables:
img_path = self.vars['img_path']
response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
system=system_prompt, user=user_prompt,
img_path=img_path)
response, finish_reason = await (self.ai_handler.chat_completion
(model=model, temperature=get_settings().config.temperature,
system=system_prompt, user=user_prompt, img_path=img_path))
else:
response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
system=system_prompt, user=user_prompt)
response, finish_reason = await self.ai_handler.chat_completion(
model=model, temperature=get_settings().config.temperature, system=system_prompt, user=user_prompt)
return response
def _prepare_pr_answer(self) -> str:

View File

@@ -180,7 +180,7 @@ class PRReviewer:
response, finish_reason = await self.ai_handler.chat_completion(
model=model,
temperature=0.2,
temperature=get_settings().config.temperature,
system=system_prompt,
user=user_prompt
)

View File

@@ -103,8 +103,8 @@ class PRUpdateChangelog:
environment = Environment(undefined=StrictUndefined)
system_prompt = environment.from_string(get_settings().pr_update_changelog_prompt.system).render(variables)
user_prompt = environment.from_string(get_settings().pr_update_changelog_prompt.user).render(variables)
response, finish_reason = await self.ai_handler.chat_completion(model=model, temperature=0.2,
system=system_prompt, user=user_prompt)
response, finish_reason = await self.ai_handler.chat_completion(
model=model, system=system_prompt, user=user_prompt, temperature=get_settings().config.temperature)
return response