From 8f943a0d44bfad4abc48800dabc846edce3a7866 Mon Sep 17 00:00:00 2001
From: mrT23
Date: Sun, 15 Sep 2024 08:07:59 +0300
Subject: [PATCH] fix: update error logging messages and system prompt handling
 in litellm_ai_handler.py

---
 pr_agent/algo/ai_handlers/litellm_ai_handler.py | 10 +++++-----
 1 file changed, 5 insertions(+), 5 deletions(-)

diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py
index 282f662b..ed59b5ab 100644
--- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py
+++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py
@@ -167,7 +167,7 @@ class LiteLLMAIHandler(BaseAiHandler):
         if self.azure:
             model = 'azure/' + model
         if 'claude' in model and not system:
-            system = "\n"
+            system = "No system prompt provided"
             get_logger().warning(
-                "Empty system prompt for claude model. Adding a newline character to prevent OpenAI API error.")
+                "Empty system prompt for claude model. Adding a placeholder system prompt to prevent API error.")
         messages = [{"role": "system", "content": system}, {"role": "user", "content": user}]
@@ -215,13 +215,13 @@ class LiteLLMAIHandler(BaseAiHandler):
             response = await acompletion(**kwargs)
         except (openai.APIError, openai.APITimeoutError) as e:
-            get_logger().warning("Error during OpenAI inference: ", e)
+            get_logger().warning(f"Error during LLM inference: {e}")
             raise
         except (openai.RateLimitError) as e:
-            get_logger().error("Rate limit error during OpenAI inference: ", e)
+            get_logger().error(f"Rate limit error during LLM inference: {e}")
             raise
         except (Exception) as e:
-            get_logger().warning("Unknown error during OpenAI inference: ", e)
+            get_logger().warning(f"Unknown error during LLM inference: {e}")
             raise openai.APIError from e
         if response is None or len(response["choices"]) == 0:
             raise openai.APIError