diff --git a/pr_agent/algo/ai_handler.py b/pr_agent/algo/ai_handler.py
index b48924d6..819ba25b 100644
--- a/pr_agent/algo/ai_handler.py
+++ b/pr_agent/algo/ai_handler.py
@@ -1,4 +1,5 @@
 import logging
+import os
 
 import litellm
 import openai
@@ -24,6 +25,11 @@ class AiHandler:
         try:
             openai.api_key = get_settings().openai.key
             litellm.openai_key = get_settings().openai.key
+            if get_settings().get("litellm.use_client"):
+                litellm_token = get_settings().get("litellm.LITELLM_TOKEN")
+                assert litellm_token, "LITELLM_TOKEN is required"
+                os.environ["LITELLM_TOKEN"] = litellm_token
+                litellm.use_client = True
             self.azure = False
             if get_settings().get("OPENAI.ORG", None):
                 litellm.organization = get_settings().openai.org
diff --git a/pr_agent/settings/.secrets_template.toml b/pr_agent/settings/.secrets_template.toml
index 983e11d0..f1971a3b 100644
--- a/pr_agent/settings/.secrets_template.toml
+++ b/pr_agent/settings/.secrets_template.toml
@@ -57,3 +57,6 @@ personal_access_token = ""
 [bitbucket]
 # Bitbucket personal bearer token
 bearer_token = ""
+
+[litellm]
+LITELLM_TOKEN = "" # see https://docs.litellm.ai/docs/debugging/hosted_debugging for details and instructions on how to get a token
\ No newline at end of file
diff --git a/pr_agent/settings/configuration.toml b/pr_agent/settings/configuration.toml
index da3e1924..75dfcf97 100644
--- a/pr_agent/settings/configuration.toml
+++ b/pr_agent/settings/configuration.toml
@@ -95,6 +95,8 @@ polling_interval_seconds = 30
 # token to authenticate in the patch server
 # patch_server_token = ""
 
+[litellm]
+#use_client = false
 
 [pr_similar_issue]
 skip_comments = false
diff --git a/requirements.txt b/requirements.txt
index f044e8d9..8791a115 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -13,7 +13,7 @@ atlassian-python-api==3.39.0
 GitPython==3.1.32
 PyYAML==6.0
 starlette-context==0.3.6
-litellm~=0.1.538
+litellm~=0.1.574
 boto3==1.28.25
 google-cloud-storage==2.10.0
 ujson==5.8.0
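
For context, a minimal sketch of how the new switch would be enabled in a local setup (not part of the diff; the token value below is a placeholder, and the exact file names assume the usual pr-agent pattern of overriding configuration.toml and supplying secrets via .secrets.toml):

    # configuration.toml (or a local override)
    [litellm]
    use_client = true          # uncommented and set to true to enable the litellm client

    # .secrets.toml
    [litellm]
    LITELLM_TOKEN = "your-litellm-token"   # placeholder; see https://docs.litellm.ai/docs/debugging/hosted_debugging

With both values set, AiHandler.__init__ exports LITELLM_TOKEN into the process environment and sets litellm.use_client = True; if use_client is enabled but the token is missing, the assert fails at startup.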