Merge remote-tracking branch 'origin/main'

Ori Kotek
2023-07-20 22:00:24 +03:00
5 changed files with 20 additions and 4 deletions

@@ -9,6 +9,9 @@ Making pull requests less painful with an AI agent
 [![GitHub license](https://img.shields.io/badge/License-Apache_2.0-blue.svg)](https://github.com/Codium-ai/pr-agent/blob/main/LICENSE)
 [![Discord](https://badgen.net/badge/icon/discord?icon=discord&label&color=purple)](https://discord.com/channels/1057273017547378788/1126104260430528613)
+<a href="https://github.com/Codium-ai/pr-agent/commits/main">
+    <img alt="GitHub" src="https://img.shields.io/github/last-commit/Codium-ai/pr-agent/main?style=for-the-badge" height="20">
+</a>
 </div>
 <div style="text-align:left;">

@@ -1,12 +1,12 @@
 import logging
 
 import openai
-from openai.error import APIError, Timeout, TryAgain
+from openai.error import APIError, Timeout, TryAgain, RateLimitError
 from retry import retry
 
 from pr_agent.config_loader import settings
 
-OPENAI_RETRIES=2
+OPENAI_RETRIES=5
 
 class AiHandler:
     """
@@ -34,7 +34,7 @@ class AiHandler:
         except AttributeError as e:
             raise ValueError("OpenAI key is required") from e
 
-    @retry(exceptions=(APIError, Timeout, TryAgain, AttributeError),
+    @retry(exceptions=(APIError, Timeout, TryAgain, AttributeError, RateLimitError),
            tries=OPENAI_RETRIES, delay=2, backoff=2, jitter=(1, 3))
     async def chat_completion(self, model: str, temperature: float, system: str, user: str):
         """
@@ -69,6 +69,12 @@ class AiHandler:
         except (APIError, Timeout, TryAgain) as e:
             logging.error("Error during OpenAI inference: ", e)
             raise
+        except (RateLimitError) as e:
+            logging.error("Rate limit error during OpenAI inference: ", e)
+            raise
+        except (Exception) as e:
+            logging.error("Unknown error during OpenAI inference: ", e)
+            raise TryAgain from e
         if response is None or len(response.choices) == 0:
             raise TryAgain
         resp = response.choices[0]['message']['content']
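The retry behaviour above comes from the `retry` package: tries=OPENAI_RETRIES (now 5), delay=2, backoff=2 and jitter=(1, 3) give roughly exponential spacing between attempts, and adding RateLimitError to the exception tuple means rate-limited calls are retried instead of failing on the first 429. A minimal, self-contained sketch of that decorator pattern (the fake_completion function and its attempt counter are invented for illustration and are not part of pr-agent):

import logging

from retry import retry


class RateLimitError(Exception):
    """Stand-in for openai.error.RateLimitError, used only in this sketch."""


_attempts = {"count": 0}


@retry(exceptions=(RateLimitError,), tries=5, delay=2, backoff=2, jitter=(1, 3))
def fake_completion() -> str:
    # Fail twice with a simulated rate-limit error, then succeed; the decorator
    # sleeps ~2s, then ~4s (plus 1-3s of random jitter) between the attempts.
    _attempts["count"] += 1
    if _attempts["count"] < 3:
        raise RateLimitError("simulated 429")
    return "ok"


if __name__ == "__main__":
    logging.basicConfig(level=logging.DEBUG)
    print(fake_completion())  # prints "ok" after two retried failures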

@@ -105,6 +105,9 @@ class GithubProvider(GitProvider):
         # self.pr.create_issue_comment(pr_comment)
 
     def publish_comment(self, pr_comment: str, is_temporary: bool = False):
+        if is_temporary and not settings.config.publish_output_progress:
+            logging.debug(f"Skipping publish_comment for temporary comment: {pr_comment}")
+            return
         response = self.pr.create_issue_comment(pr_comment)
         if hasattr(response, "user") and hasattr(response.user, "login"):
             self.github_user_id = response.user.login
@@ -205,7 +208,7 @@ class GithubProvider(GitProvider):
     def remove_initial_comment(self):
         try:
-            for comment in self.pr.comments_list:
+            for comment in getattr(self.pr, 'comments_list', []):
                 if comment.is_temporary:
                     comment.delete()
         except Exception as e:
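Taken together, the two hunks above make temporary progress comments opt-out: publish_comment returns early for temporary comments when publish_output_progress is disabled, and remove_initial_comment no longer assumes the PR object ever had a comments_list attached. A rough sketch of that gating logic with stand-in objects (the settings namespace, FakePR and the bot login are invented for illustration; the real provider uses pr_agent.config_loader.settings and a PyGithub pull request):

import logging
from types import SimpleNamespace

# Stand-in for pr_agent.config_loader.settings; only the one flag is modelled.
settings = SimpleNamespace(config=SimpleNamespace(publish_output_progress=False))


class FakePR:
    """Minimal stand-in for the pull-request object used by the provider."""

    def create_issue_comment(self, body: str):
        print(f"posted: {body}")
        return SimpleNamespace(user=SimpleNamespace(login="pr-agent-bot"))


def publish_comment(pr, pr_comment: str, is_temporary: bool = False):
    # Temporary progress comments are skipped entirely while the flag is off.
    if is_temporary and not settings.config.publish_output_progress:
        logging.debug(f"Skipping publish_comment for temporary comment: {pr_comment}")
        return
    return pr.create_issue_comment(pr_comment)


publish_comment(FakePR(), "Preparing review...", is_temporary=True)  # skipped
publish_comment(FakePR(), "Here is the review")                      # posted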

@@ -259,4 +259,7 @@ class GitLabProvider(GitProvider):
         return None
 
     def publish_labels(self, labels):
         pass
+
+    def publish_inline_comments(self, comments: list[dict]):
+        pass

@@ -2,6 +2,7 @@
 model="gpt-4-0613"
 git_provider="github"
 publish_output=true
+publish_output_progress=true
 verbosity_level=0 # 0,1,2
 
 [pr_reviewer]
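publish_output_progress is the flag consulted by publish_comment above: it defaults to true, so temporary progress comments keep being posted unless a user turns it off. A minimal sketch of reading that section with the standard library (the file path is an assumption for this sketch; pr-agent loads these settings through its own config loader rather than tomllib):

import tomllib  # Python 3.11+; on older versions the third-party 'tomli' package offers the same API

# The path is assumed for illustration only.
with open("configuration.toml", "rb") as f:
    cfg = tomllib.load(f)["config"]

print(cfg["model"])                    # "gpt-4-0613"
print(cfg["publish_output_progress"])  # True -> progress comments are published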