Merge pull request #1145 from MarkRx/feature/litellm-logging-observability

Add and document ability to use LiteLLM Logging Observability tools
Authored by Tal on 2024-08-22 09:58:53 +03:00, committed via GitHub
4 changed files with 91 additions and 2 deletions


@@ -91,3 +91,24 @@ user="""
 """
 ```
 Note that the new prompt will need to generate an output compatible with the relevant [post-process function](https://github.com/Codium-ai/pr-agent/blob/main/pr_agent/tools/pr_description.py#L137).
+
+## Integrating with Logging and Observability Platforms
+
+Various logging and observability tools can be used out of the box when using the default LiteLLM AI Handler. Simply configure the LiteLLM callback settings in `configuration.toml` and set the environment variables required by the LiteLLM [documentation](https://docs.litellm.ai/docs/).
+
+For example, to use [LangSmith](https://www.langchain.com/langsmith) you can add the following to your `configuration.toml` file:
+```
+[litellm]
+...
+success_callback = ["langsmith"]
+failure_callback = ["langsmith"]
+service_callback = []
+```
+
+Then set the following environment variables:
+
+```
+LANGSMITH_API_KEY=<api_key>
+LANGSMITH_PROJECT=<project>
+LANGSMITH_BASE_URL=<url>
+```


@@ -79,7 +79,7 @@ class PRAgent:
         if action not in command2class:
             get_logger().debug(f"Unknown command: {action}")
             return False
-        with get_logger().contextualize(command=action):
+        with get_logger().contextualize(command=action, pr_url=pr_url):
             get_logger().info("PR-Agent request handler started", analytics=True)
             if action == "reflect_and_review":
                 get_settings().pr_reviewer.ask_and_reflect = True
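
The one-line change above relies on Loguru's `contextualize()`, which binds extra fields to every record logged inside the `with` block; the new `pr_url` field is what the callback code in the AI handler later reads back. A minimal standalone sketch of the mechanism (illustrative values, not pr-agent code):

```
# Standalone Loguru sketch: contextualize() attaches fields to every
# record emitted inside the block, and a custom sink can read them
# back from message.record["extra"].
from loguru import logger

def sink(message):
    extra = message.record["extra"]
    print("command:", extra.get("command"), "| pr_url:", extra.get("pr_url"))

handler_id = logger.add(sink)
with logger.contextualize(command="review", pr_url="https://example.com/pr/1"):
    logger.info("PR-Agent request handler started")
logger.remove(handler_id)
```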


@ -1,10 +1,10 @@
import os import os
import requests import requests
import boto3
import litellm import litellm
import openai import openai
from litellm import acompletion from litellm import acompletion
from tenacity import retry, retry_if_exception_type, stop_after_attempt from tenacity import retry, retry_if_exception_type, stop_after_attempt
from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler
from pr_agent.config_loader import get_settings from pr_agent.config_loader import get_settings
from pr_agent.log import get_logger from pr_agent.log import get_logger
@@ -44,6 +44,12 @@ class LiteLLMAIHandler(BaseAiHandler):
             litellm.use_client = True
         if get_settings().get("LITELLM.DROP_PARAMS", None):
             litellm.drop_params = get_settings().litellm.drop_params
+        if get_settings().get("LITELLM.SUCCESS_CALLBACK", None):
+            litellm.success_callback = get_settings().litellm.success_callback
+        if get_settings().get("LITELLM.FAILURE_CALLBACK", None):
+            litellm.failure_callback = get_settings().litellm.failure_callback
+        if get_settings().get("LITELLM.SERVICE_CALLBACK", None):
+            litellm.service_callback = get_settings().litellm.service_callback
         if get_settings().get("OPENAI.ORG", None):
             litellm.organization = get_settings().openai.org
         if get_settings().get("OPENAI.API_TYPE", None):
@@ -89,6 +95,60 @@ class LiteLLMAIHandler(BaseAiHandler):
             response_log['main_pr_language'] = 'unknown'
         return response_log
 
+    def add_litellm_callbacks(self, kwargs) -> dict:
+        captured_extra = []
+
+        def capture_logs(message):
+            # Parse the record's bound context fields (set via contextualize)
+            record = message.record
+            log_entry = {}
+            if record.get('extra', {}).get('command', None) is not None:
+                log_entry.update({"command": record['extra']["command"]})
+            if record.get('extra', {}).get('pr_url', None) is not None:
+                log_entry.update({"pr_url": record['extra']["pr_url"]})
+            # Append the log entry to the captured_extra list
+            captured_extra.append(log_entry)
+
+        # Temporarily add a custom sink to Loguru and emit one record so the
+        # sink can capture the current logging context, then remove the sink
+        handler_id = get_logger().add(capture_logs)
+        get_logger().debug("Capturing logs for litellm callbacks")
+        get_logger().remove(handler_id)
+
+        context = captured_extra[0] if len(captured_extra) > 0 else {}
+        command = context.get("command", "unknown")
+        pr_url = context.get("pr_url", "unknown")
+        git_provider = get_settings().config.git_provider
+
+        metadata = dict()
+        callbacks = litellm.success_callback + litellm.failure_callback + litellm.service_callback
+        if "langfuse" in callbacks:
+            metadata.update({
+                "trace_name": command,
+                "tags": [git_provider, command],
+                "trace_metadata": {
+                    "command": command,
+                    "pr_url": pr_url,
+                },
+            })
+        if "langsmith" in callbacks:
+            metadata.update({
+                "run_name": command,
+                "tags": [git_provider, command],
+                "extra": {
+                    "metadata": {
+                        "command": command,
+                        "pr_url": pr_url,
+                    }
+                },
+            })
+
+        # Attach the captured context to the request kwargs as metadata
+        kwargs["metadata"] = metadata
+        return kwargs
+
     @property
     def deployment_id(self):
         """
@@ -133,6 +193,10 @@ class LiteLLMAIHandler(BaseAiHandler):
             "force_timeout": get_settings().config.ai_timeout,
             "api_base": self.api_base,
         }
+
+        if get_settings().litellm.get("enable_callbacks", False):
+            kwargs = self.add_litellm_callbacks(kwargs)
+
         seed = get_settings().config.get("seed", -1)
         if temperature > 0 and seed >= 0:
             raise ValueError(f"Seed ({seed}) is not supported with temperature ({temperature}) > 0")


@@ -267,6 +267,10 @@ pr_commands = [
 [litellm]
 # use_client = false
 # drop_params = false
+enable_callbacks = false
+success_callback = []
+failure_callback = []
+service_callback = []
 
 [pr_similar_issue]
 skip_comments = false
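
Taken together, these defaults keep the feature off until a user opts in. Besides LangSmith, the handler also recognizes a `langfuse` callback; a rough end-to-end sketch of what the wiring above amounts to in that case (illustrative, not pr-agent code; assumes the standard LANGFUSE_PUBLIC_KEY and LANGFUSE_SECRET_KEY environment variables are exported):

```
# Rough end-to-end sketch (not pr-agent code): the equivalent of setting
# enable_callbacks = true with success/failure callbacks = ["langfuse"].
# Assumes LANGFUSE_PUBLIC_KEY / LANGFUSE_SECRET_KEY are exported.
import litellm

litellm.success_callback = ["langfuse"]
litellm.failure_callback = ["langfuse"]

response = litellm.completion(
    model="gpt-4o",  # illustrative
    messages=[{"role": "user", "content": "Review this diff."}],
    metadata={
        "trace_name": "review",  # mirrors the langfuse branch of add_litellm_callbacks
        "tags": ["github", "review"],
        "trace_metadata": {"command": "review", "pr_url": "<pr_url>"},
    },
)
```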