From 501b0595759c47549ae3fc155ad79b5a29bf4835 Mon Sep 17 00:00:00 2001 From: gregoryboue Date: Tue, 2 Apr 2024 11:01:45 +0200 Subject: [PATCH] feat: allow ollama usage Fix https://github.com/Codium-ai/pr-agent/issues/657 --- pr_agent/algo/ai_handlers/litellm_ai_handler.py | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/pr_agent/algo/ai_handlers/litellm_ai_handler.py b/pr_agent/algo/ai_handlers/litellm_ai_handler.py index ce4d1db0..d07542f6 100644 --- a/pr_agent/algo/ai_handlers/litellm_ai_handler.py +++ b/pr_agent/algo/ai_handlers/litellm_ai_handler.py @@ -61,6 +61,9 @@ class LiteLLMAIHandler(BaseAiHandler): if get_settings().get("HUGGINGFACE.API_BASE", None) and 'huggingface' in get_settings().config.model: litellm.api_base = get_settings().huggingface.api_base self.api_base = get_settings().huggingface.api_base + if get_settings().get("OLLAMA.API_BASE", None): + litellm.api_base = get_settings().ollama.api_base + self.api_base = get_settings().ollama.api_base if get_settings().get("HUGGINGFACE.REPITITION_PENALTY", None): self.repetition_penalty = float(get_settings().huggingface.repetition_penalty) if get_settings().get("VERTEXAI.VERTEX_PROJECT", None): @@ -150,4 +153,4 @@ class LiteLLMAIHandler(BaseAiHandler): if get_settings().config.verbosity_level >= 2: get_logger().info(f"\nAI response:\n{resp}") - return resp, finish_reason \ No newline at end of file + return resp, finish_reason