From e7b05732f83a0f17d5fac06811b440d612567998 Mon Sep 17 00:00:00 2001 From: chandan84 Date: Sat, 22 Feb 2025 14:12:39 -0500 Subject: [PATCH] line 253-255, pass extra_headers field from settings to litellm --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index b22b834e..77c9842f 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -11,6 +11,7 @@ from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler from pr_agent.algo.utils import get_version from pr_agent.config_loader import get_settings from pr_agent.log import get_logger +import json OPENAI_RETRIES = 5 @@ -249,6 +250,10 @@ class LiteLLMAIHandler(BaseAiHandler): get_logger().info(f"\nSystem prompt:\n{system}") get_logger().info(f"\nUser prompt:\n{user}") + # Support extra_headers when using litellm to call the underlying model via an API management gateway; this allows passing custom headers for security and authorization + if get_settings().get("LITELLM.EXTRA_HEADERS", None): + kwargs["extra_headers"] = json.loads(get_settings().litellm.extra_headers) + response = await acompletion(**kwargs) except (openai.APIError, openai.APITimeoutError) as e: get_logger().warning(f"Error during LLM inference: {e}")