Merge pull request #293 from Codium-ai/tr/litellm_debugger

Integration of Litellm Client with AI Handler
This commit is contained in:
mrT23
2023-09-10 13:56:06 +03:00
committed by GitHub
4 changed files with 12 additions and 1 deletion

View File

@@ -1,4 +1,5 @@
import logging
import os
import litellm
import openai
@@ -24,6 +25,11 @@ class AiHandler:
try:
openai.api_key = get_settings().openai.key
litellm.openai_key = get_settings().openai.key
if get_settings().get("litellm.use_client"):
litellm_token = get_settings().get("litellm.LITELLM_TOKEN")
assert litellm_token, "LITELLM_TOKEN is required"
os.environ["LITELLM_TOKEN"] = litellm_token
litellm.use_client = True
self.azure = False
if get_settings().get("OPENAI.ORG", None):
litellm.organization = get_settings().openai.org

View File

@@ -57,3 +57,6 @@ personal_access_token = ""
[bitbucket]
# Bitbucket personal bearer token
bearer_token = ""
[litellm]
LITELLM_TOKEN = "" # see https://docs.litellm.ai/docs/debugging/hosted_debugging for details and instructions on how to get a token

View File

@@ -95,6 +95,8 @@ polling_interval_seconds = 30
# token to authenticate in the patch server
# patch_server_token = ""
[litellm]
#use_client = false
[pr_similar_issue]
skip_comments = false

View File

@@ -13,7 +13,7 @@ atlassian-python-api==3.39.0
GitPython==3.1.32
PyYAML==6.0
starlette-context==0.3.6
litellm~=0.1.538
litellm~=0.1.574
boto3==1.28.25
google-cloud-storage==2.10.0
ujson==5.8.0