From e7b05732f83a0f17d5fac06811b440d612567998 Mon Sep 17 00:00:00 2001 From: chandan84 Date: Sat, 22 Feb 2025 14:12:39 -0500 Subject: [PATCH 1/6] line 253-255, pass extra_headers fields from settings to litellm --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index b22b834e..77c9842f 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -11,6 +11,7 @@ from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler from pr_agent.algo.utils import get_version from pr_agent.config_loader import get_settings from pr_agent.log import get_logger +import json OPENAI_RETRIES = 5 @@ -249,6 +250,10 @@ class LiteLLMAIHandler(BaseAiHandler): get_logger().info(f"\nSystem prompt:\n{system}") get_logger().info(f"\nUser prompt:\n{user}") + #Added support for extra_headers while using litellm to call underlying model, via a api management gateway, would allow for passing custom headers for security and authorization + if get_settings().get("LITELLM.EXTRA_HEADERS", None): + kwargs["extra_headers"] = json.loads(get_settings().litellm.extra_headers) + response = await acompletion(**kwargs) except (openai.APIError, openai.APITimeoutError) as e: get_logger().warning(f"Error during LLM inference: {e}") From 0e4a1d9ab8fae6a1c98d14e2a954c0dc6807e2f6 Mon Sep 17 00:00:00 2001 From: chandan84 Date: Sat, 22 Feb 2025 14:38:38 -0500 Subject: [PATCH 2/6] line 253-258, pass extra_headers fields from settings to litellm, exception handling to check if extra_headers is in dict format --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index 77c9842f..4ab5ee34 100644 --- 
a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -251,8 +251,10 @@ class LiteLLMAIHandler(BaseAiHandler): get_logger().info(f"\nUser prompt:\n{user}") #Added support for extra_headers while using litellm to call underlying model, via a api management gateway, would allow for passing custom headers for security and authorization - if get_settings().get("LITELLM.EXTRA_HEADERS", None): - kwargs["extra_headers"] = json.loads(get_settings().litellm.extra_headers) + litellm_extra_headers = json.loads(get_settings().litellm.extra_headers) + if not isinstance(litellm_extra_headers, dict): + raise ValueError("LITELLM.EXTRA_HEADERS must be a JSON object") + kwargs["extra_headers"] = litellm_extra_headers response = await acompletion(**kwargs) except (openai.APIError, openai.APITimeoutError) as e: From 71451de156f172114f9c237fcfd260adaa056f9b Mon Sep 17 00:00:00 2001 From: chandan84 <61125092+chandan84@users.noreply.github.com> Date: Sat, 22 Feb 2025 14:43:03 -0500 Subject: [PATCH 3/6] Update litellm_ai_handler.py line 253-258, pass extra_headers fields from settings to litellm, exception handling to check if extra_headers is in dict format --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index 4ab5ee34..016a5eb3 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -251,10 +251,11 @@ class LiteLLMAIHandler(BaseAiHandler): get_logger().info(f"\nUser prompt:\n{user}") #Added support for extra_headers while using litellm to call underlying model, via a api management gateway, would allow for passing custom headers for security and authorization - litellm_extra_headers = json.loads(get_settings().litellm.extra_headers) - if not isinstance(litellm_extra_headers, dict): - raise 
ValueError("LITELLM.EXTRA_HEADERS must be a JSON object") - kwargs["extra_headers"] = litellm_extra_headers + if get_settings().get("LITELLM.EXTRA_HEADERS", None): + litellm_extra_headers = json.loads(get_settings().litellm.extra_headers) + if not isinstance(litellm_extra_headers, dict): + raise ValueError("LITELLM.EXTRA_HEADERS must be a JSON object") + kwargs["extra_headers"] = litellm_extra_headers response = await acompletion(**kwargs) except (openai.APIError, openai.APITimeoutError) as e: From 84983f3e9d753f04f2af42228bb416a05cea75b5 Mon Sep 17 00:00:00 2001 From: chandan84 Date: Sat, 22 Feb 2025 14:56:17 -0500 Subject: [PATCH 4/6] line 253-261, pass extra_headers fields from settings to litellm, exception handling to check if extra_headers is in dict format --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 9 ++++++--- 1 file changed, 6 insertions(+), 3 deletions(-) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index 016a5eb3..56125aff 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -252,9 +252,12 @@ class LiteLLMAIHandler(BaseAiHandler): #Added support for extra_headers while using litellm to call underlying model, via a api management gateway, would allow for passing custom headers for security and authorization if get_settings().get("LITELLM.EXTRA_HEADERS", None): - litellm_extra_headers = json.loads(get_settings().litellm.extra_headers) - if not isinstance(litellm_extra_headers, dict): - raise ValueError("LITELLM.EXTRA_HEADERS must be a JSON object") + try: + litellm_extra_headers = json.loads(get_settings().litellm.extra_headers) + if not isinstance(litellm_extra_headers, dict): + raise ValueError("LITELLM.EXTRA_HEADERS must be a JSON object") + except json.JSONDecodeError as e: + raise ValueError(f"LITELLM.EXTRA_HEADERS contains invalid JSON: {str(e)}") kwargs["extra_headers"] = litellm_extra_headers response = await 
acompletion(**kwargs) From 93e34703aba7caf77408fbd9c0771c120d8ee3c7 Mon Sep 17 00:00:00 2001 From: chandan84 <61125092+chandan84@users.noreply.github.com> Date: Tue, 25 Feb 2025 14:44:03 -0500 Subject: [PATCH 5/6] Update litellm_ai_handler.py updates made based on review on https://github.com/qodo-ai/pr-agent/pull/1564 --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index 56125aff..bf07ea78 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -244,12 +244,6 @@ class LiteLLMAIHandler(BaseAiHandler): if self.repetition_penalty: kwargs["repetition_penalty"] = self.repetition_penalty - get_logger().debug("Prompts", artifact={"system": system, "user": user}) - - if get_settings().config.verbosity_level >= 2: - get_logger().info(f"\nSystem prompt:\n{system}") - get_logger().info(f"\nUser prompt:\n{user}") - #Added support for extra_headers while using litellm to call underlying model, via a api management gateway, would allow for passing custom headers for security and authorization if get_settings().get("LITELLM.EXTRA_HEADERS", None): try: @@ -259,6 +253,12 @@ class LiteLLMAIHandler(BaseAiHandler): except json.JSONDecodeError as e: raise ValueError(f"LITELLM.EXTRA_HEADERS contains invalid JSON: {str(e)}") kwargs["extra_headers"] = litellm_extra_headers + + get_logger().debug("Prompts", artifact={"system": system, "user": user}) + + if get_settings().config.verbosity_level >= 2: + get_logger().info(f"\nSystem prompt:\n{system}") + get_logger().info(f"\nUser prompt:\n{user}") response = await acompletion(**kwargs) except (openai.APIError, openai.APITimeoutError) as e: From c89c0eab8cd8210aea679b2ea9189946841ae5fe Mon Sep 17 00:00:00 2001 From: chandan84 <61125092+chandan84@users.noreply.github.com> Date: Tue, 25 Feb 2025 15:15:53 
-0500 Subject: [PATCH 6/6] Update changing_a_model.md updated docs for extra_headers usage for litellm --- docs/docs/usage-guide/changing_a_model.md | 8 ++++++++ 1 file changed, 8 insertions(+) diff --git a/docs/docs/usage-guide/changing_a_model.md b/docs/docs/usage-guide/changing_a_model.md index d221b953..977bd022 100644 --- a/docs/docs/usage-guide/changing_a_model.md +++ b/docs/docs/usage-guide/changing_a_model.md @@ -30,6 +30,14 @@ model="" # the OpenAI model you've deployed on Azure (e.g. gpt-4o) fallback_models=["..."] ``` +Passing custom headers to the underlying LLM Model API can be done by setting the extra_headers parameter for litellm. +``` +[litellm] +extra_headers='{"projectId": "", ...}' #The value of this setting should be a JSON string representing the desired headers; a ValueError is thrown otherwise. +``` +This enables users to pass authorization tokens or API keys when routing requests through an API management gateway. + + ### Ollama You can run models locally through either [VLLM](https://docs.litellm.ai/docs/providers/vllm) or [Ollama](https://docs.litellm.ai/docs/providers/ollama)