1. update LangChainOpenAIHandler to support langchain version 0.2

2. read openai_api_base from settings for LLMs that are compatible with the OpenAI API
ryan
2024-06-06 22:27:01 +08:00
parent 6d6fb67306
commit b28f66aaa0


@@ -1,6 +1,6 @@
 try:
-    from langchain.chat_models import ChatOpenAI, AzureChatOpenAI
-    from langchain.schema import SystemMessage, HumanMessage
+    from langchain_openai import ChatOpenAI, AzureChatOpenAI
+    from langchain_core.messages import SystemMessage, HumanMessage
 except: # we don't enforce langchain as a dependency, so if it's not installed, just move on
     pass
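
The import change above tracks the langchain 0.2 package split: chat models moved to the langchain_openai package and message types to langchain_core. A minimal sketch of the new-style usage (key, model name, and prompts are placeholders):

# langchain >= 0.2: chat model in langchain_openai, message classes in langchain_core
from langchain_openai import ChatOpenAI
from langchain_core.messages import SystemMessage, HumanMessage

chat = ChatOpenAI(openai_api_key="sk-placeholder", model="gpt-4o")  # placeholder key/model
messages = [
    SystemMessage(content="You are a code reviewer."),
    HumanMessage(content="Explain this diff in one sentence."),
]
reply = chat.invoke(messages)  # returns an AIMessage
print(reply.content)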
@@ -8,7 +8,7 @@ from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler
 from pr_agent.config_loader import get_settings
 from pr_agent.log import get_logger
-from openai.error import APIError, RateLimitError, Timeout, TryAgain
+from openai import APIError, RateLimitError, Timeout
 from retry import retry
 import functools
@@ -31,20 +31,24 @@ class LangChainOpenAIHandler(BaseAiHandler):
                     openai_api_version=get_settings().openai.api_version,
                 )
             else:
-                self._chat = ChatOpenAI(openai_api_key=get_settings().openai.key)
+                # for LLMs that are compatible with the OpenAI API, use the custom api base from settings
+                openai_api_base = get_settings().get("OPENAI.API_BASE", None)
+                if openai_api_base is None or len(openai_api_base) == 0:
+                    self._chat = ChatOpenAI(openai_api_key=get_settings().openai.key)
+                else:
+                    self._chat = ChatOpenAI(openai_api_key=get_settings().openai.key, openai_api_base=openai_api_base)
         except AttributeError as e:
             if getattr(e, "name"):
                 raise ValueError(f"OpenAI {e.name} is required") from e
             else:
                 raise e
 
-    @property
-    def chat(self):
+    def chat(self, messages: list, model: str, temperature: float):
         if self.azure:
             # we must set the deployment_id only here (instead of the __init__ method) to support fallback_deployments
-            return self._chat(deployment_name=self.deployment_id)
+            return self._chat.invoke(input=messages, model=model, temperature=temperature, deployment_name=self.deployment_id)
         else:
-            return self._chat
+            return self._chat.invoke(input=messages, model=model, temperature=temperature)
 
     @property
     def deployment_id(self):
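
The non-Azure branch above amounts to constructing ChatOpenAI once, optionally pointed at a custom base URL, and then passing model and temperature per invoke call. A standalone sketch of that pattern, with placeholder values for a local OpenAI-compatible server (e.g. vLLM or LocalAI):

from langchain_openai import ChatOpenAI
from langchain_core.messages import SystemMessage, HumanMessage

# placeholder endpoint/key for an OpenAI-compatible server
chat = ChatOpenAI(
    openai_api_key="not-needed-locally",
    openai_api_base="http://localhost:8000/v1",
)

# model and temperature are supplied per call, matching the new chat() signature
reply = chat.invoke(
    [SystemMessage(content="You are helpful."), HumanMessage(content="Say hello.")],
    model="my-local-model",
    temperature=0.2,
)
print(reply.content)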
@@ -53,7 +57,7 @@ class LangChainOpenAIHandler(BaseAiHandler):
         """
         return get_settings().get("OPENAI.DEPLOYMENT_ID", None)
 
-    @retry(exceptions=(APIError, Timeout, TryAgain, AttributeError, RateLimitError),
+    @retry(exceptions=(APIError, Timeout, AttributeError, RateLimitError),
            tries=OPENAI_RETRIES, delay=2, backoff=2, jitter=(1, 3))
     async def chat_completion(self, model: str, system: str, user: str, temperature: float = 0.2):
         try: