diff --git a/pr_agent/algo/ai_handler.py b/pr_agent/algo/ai_handler.py
index b48924d6..819ba25b 100644
--- a/pr_agent/algo/ai_handler.py
+++ b/pr_agent/algo/ai_handler.py
@@ -1,4 +1,5 @@
 import logging
+import os
 
 import litellm
 import openai
@@ -24,6 +25,11 @@ class AiHandler:
         try:
             openai.api_key = get_settings().openai.key
             litellm.openai_key = get_settings().openai.key
+            if get_settings().get("litellm.use_client"):
+                litellm_token = get_settings().get("litellm.LITELLM_TOKEN")
+                assert litellm_token, "LITELLM_TOKEN is required"
+                os.environ["LITELLM_TOKEN"] = litellm_token
+                litellm.use_client = True
             self.azure = False
             if get_settings().get("OPENAI.ORG", None):
                 litellm.organization = get_settings().openai.org
diff --git a/pr_agent/settings/configuration.toml b/pr_agent/settings/configuration.toml
index 9b6a3323..eb5878d0 100644
--- a/pr_agent/settings/configuration.toml
+++ b/pr_agent/settings/configuration.toml
@@ -94,3 +94,7 @@ polling_interval_seconds = 30
 # patch_server_endpoint = "http://127.0.0.1:5000/patch"
 # token to authenticate in the patch server
 # patch_server_token = ""
+
+[litellm]
+#LITELLM_TOKEN = "..." # see https://docs.litellm.ai/docs/debugging/hosted_debugging for details and instructions
+#use_client = false
diff --git a/requirements.txt b/requirements.txt
index 6a17d715..2f155d60 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,7 +13,7 @@ atlassian-python-api==3.39.0
 GitPython~=3.1.32
 PyYAML==6.0
 starlette-context==0.3.6
-litellm~=0.1.538
+litellm~=0.1.574
 boto3~=1.28.25
 google-cloud-storage==2.10.0
 ujson==5.8.0
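Usage note: a minimal sketch of what a user would uncomment in pr_agent/settings/configuration.toml to turn the new option on, assuming the settings keys introduced above; the token value is a placeholder obtained per the litellm hosted-debugging docs linked in the diff.

[litellm]
# placeholder token; see https://docs.litellm.ai/docs/debugging/hosted_debugging
LITELLM_TOKEN = "..."
use_client = true

With use_client set, AiHandler exports LITELLM_TOKEN into the environment and enables litellm.use_client at init; with it unset or false, behavior is unchanged.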