mirror of https://github.com/qodo-ai/pr-agent.git
Logging
@@ -74,9 +74,15 @@ class AiHandler:
             TryAgain: If there is an attribute error during OpenAI inference.
         """
         try:
+            deployment_id = self.deployment_id
+            if get_settings().config.verbosity_level >= 2:
+                logging.debug(
+                    f"Generating completion with {model}"
+                    f"{(' from deployment ' + deployment_id) if deployment_id else ''}"
+                )
             response = await acompletion(
                 model=model,
-                deployment_id=self.deployment_id,
+                deployment_id=deployment_id,
                 messages=[
                     {"role": "system", "content": system},
                     {"role": "user", "content": user}
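
As a quick check of the new debug message, the conditional deployment suffix can be exercised on its own. A minimal sketch with made-up model and deployment names (describe_completion is a hypothetical helper, not part of the diff):

import logging
from typing import Optional

logging.basicConfig(level=logging.DEBUG)

def describe_completion(model: str, deployment_id: Optional[str]) -> str:
    # Mirrors the paired f-strings added in the hunk above: the deployment
    # suffix is only appended when deployment_id is actually set.
    return (
        f"Generating completion with {model}"
        f"{(' from deployment ' + deployment_id) if deployment_id else ''}"
    )

# Hypothetical values, just to exercise both branches of the conditional.
logging.debug(describe_completion("gpt-4", "my-azure-deployment"))
logging.debug(describe_completion("gpt-4", None))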
@@ -214,7 +214,6 @@ async def retry_with_fallback_models(f: Callable):
     if not isinstance(fallback_models, list):
         fallback_models = [m.strip() for m in fallback_models.split(",")]
     all_models = [model] + fallback_models
-
     # getting all deployments
     deployment_id = get_settings().get("openai.deployment_id", None)
     fallback_deployments = get_settings().get("openai.fallback_deployments", [])
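
The context lines above show how a comma-separated fallback_models setting is normalized into a list before being combined with the primary model. A tiny sketch with an invented setting value:

# Minimal sketch of the normalization shown in the context lines above.
# The setting values here are invented for illustration.
model = "gpt-4"
fallback_models = "gpt-3.5-turbo-16k, gpt-3.5-turbo"  # hypothetical comma-separated setting

if not isinstance(fallback_models, list):
    fallback_models = [m.strip() for m in fallback_models.split(",")]

all_models = [model] + fallback_models
print(all_models)  # ['gpt-4', 'gpt-3.5-turbo-16k', 'gpt-3.5-turbo']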
@@ -230,7 +229,11 @@ async def retry_with_fallback_models(f: Callable):
             get_settings().set("openai.deployment_id", deployment_id)
             return await f(model)
         except Exception as e:
-            logging.warning(f"Failed to generate prediction with {model}: {traceback.format_exc()}")
+            logging.warning(
+                f"Failed to generate prediction with {model}"
+                f"{(' from deployment ' + deployment_id) if deployment_id else ''}: "
+                f"{traceback.format_exc()}"
+            )
             if i == len(all_models) - 1:  # If it's the last iteration
                 raise  # Re-raise the last exception
 
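
The warning above is emitted from a retry loop that tries each model (and its matching deployment) in turn and only re-raises on the final candidate. A self-contained sketch of that control flow, assuming a caller-supplied coroutine f and pre-built model/deployment lists (settings handling omitted):

import logging
import traceback

async def retry_with_fallback(f, all_models, all_deployments):
    # Try each (model, deployment_id) pair in order. Failures before the
    # last candidate are only logged; the final failure is re-raised.
    for i, (model, deployment_id) in enumerate(zip(all_models, all_deployments)):
        try:
            return await f(model)
        except Exception:
            logging.warning(
                f"Failed to generate prediction with {model}"
                f"{(' from deployment ' + deployment_id) if deployment_id else ''}: "
                f"{traceback.format_exc()}"
            )
            if i == len(all_models) - 1:  # last candidate: give up
                raise

# Example usage (hypothetical always-failing callable):
#   import asyncio
#   async def f(model): raise RuntimeError("boom")
#   asyncio.run(retry_with_fallback(f, ["gpt-4", "gpt-3.5-turbo"], [None, None]))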