From 34e421f79bfbd8dac76255200b09a7646dc11b19 Mon Sep 17 00:00:00 2001 From: mrT23 Date: Sun, 25 Feb 2024 09:58:58 +0200 Subject: [PATCH] Refactor logging statements for better readability and debugging --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 11 +++++++---- pr_agent/algo/pr_processing.py | 2 +- 2 files changed, 8 insertions(+), 5 deletions(-) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index 165976c7..a36f2672 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -100,6 +100,7 @@ class LiteLLMAIHandler(BaseAiHandler): TryAgain: If there is an attribute error during OpenAI inference. """ try: + resp, finish_reason = None, None deployment_id = self.deployment_id if self.azure: model = 'azure/' + model @@ -127,9 +128,11 @@ class LiteLLMAIHandler(BaseAiHandler): raise TryAgain from e if response is None or len(response["choices"]) == 0: raise TryAgain - resp = response["choices"][0]['message']['content'] - finish_reason = response["choices"][0]["finish_reason"] - # usage = response.get("usage") + else: + resp = response["choices"][0]['message']['content'] + finish_reason = response["choices"][0]["finish_reason"] + # usage = response.get("usage") + get_logger().debug(f"\nAI response:\n{resp}") + get_logger().debug("full_response", response=response) - get_logger().debug(f"\nAI response:\n{resp}", full_response=response) return resp, finish_reason \ No newline at end of file diff --git a/pr_agent/algo/pr_processing.py b/pr_agent/algo/pr_processing.py index d4a195d7..90482a02 100644 --- a/pr_agent/algo/pr_processing.py +++ b/pr_agent/algo/pr_processing.py @@ -254,7 +254,7 @@ async def retry_with_fallback_models(f: Callable, model_type: ModelType = ModelT ) get_settings().set("openai.deployment_id", deployment_id) return await f(model) - except Exception as e: + except Exception: get_logger().warning( f"Failed to generate prediction with 
{model}" f"{(' from deployment ' + deployment_id) if deployment_id else ''}: "