Add and document ability to use LiteLLM Logging Observability tools

This commit is contained in:
MarkRx
2024-08-19 15:45:47 -04:00
parent aa87bc60f6
commit 8aa76a0ac5
3 changed files with 64 additions and 5 deletions

View File

@ -91,3 +91,24 @@ user="""
""" """
``` ```
Note that the new prompt will need to generate an output compatible with the relevant [post-process function](https://github.com/Codium-ai/pr-agent/blob/main/pr_agent/tools/pr_description.py#L137). Note that the new prompt will need to generate an output compatible with the relevant [post-process function](https://github.com/Codium-ai/pr-agent/blob/main/pr_agent/tools/pr_description.py#L137).
## Integrating with Logging Observability Platforms
Various logging observability tools can be used out-of-the-box when using the default LiteLLM AI Handler. Simply configure the LiteLLM callback settings in `configuration.toml` and set environment variables according to the LiteLLM [documentation](https://docs.litellm.ai/docs/).
For example, to use [LangSmith](https://www.langchain.com/langsmith) you can add the following to your `configuration.toml` file:
```
[litellm]
...
success_callback = ["langsmith"]
failure_callback = ["langsmith"]
service_callback = []
```
Then set the following environment variables:
```
LANGSMITH_API_KEY=<api_key>
LANGSMITH_PROJECT=<project>
LANGSMITH_BASE_URL=<url>
```

View File

@ -1,10 +1,10 @@
import os import os
import requests import requests
import boto3
import litellm import litellm
import openai import openai
from litellm import acompletion from litellm import acompletion
from tenacity import retry, retry_if_exception_type, stop_after_attempt from tenacity import retry, retry_if_exception_type, stop_after_attempt
from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler from pr_agent.algo.ai_handlers.base_ai_handler import BaseAiHandler
from pr_agent.config_loader import get_settings from pr_agent.config_loader import get_settings
from pr_agent.log import get_logger from pr_agent.log import get_logger
@ -44,6 +44,12 @@ class LiteLLMAIHandler(BaseAiHandler):
litellm.use_client = True litellm.use_client = True
if get_settings().get("LITELLM.DROP_PARAMS", None): if get_settings().get("LITELLM.DROP_PARAMS", None):
litellm.drop_params = get_settings().litellm.drop_params litellm.drop_params = get_settings().litellm.drop_params
if get_settings().get("LITELLM.SUCCESS_CALLBACK", None):
litellm.success_callback = get_settings().litellm.success_callback
if get_settings().get("LITELLM.FAILURE_CALLBACK", None):
litellm.failure_callback = get_settings().litellm.failure_callback
if get_settings().get("LITELLM.SERVICE_CALLBACK", None):
litellm.service_callback = get_settings().litellm.service_callback
if get_settings().get("OPENAI.ORG", None): if get_settings().get("OPENAI.ORG", None):
litellm.organization = get_settings().openai.org litellm.organization = get_settings().openai.org
if get_settings().get("OPENAI.API_TYPE", None): if get_settings().get("OPENAI.API_TYPE", None):
@ -90,27 +96,56 @@ class LiteLLMAIHandler(BaseAiHandler):
return response_log return response_log
def add_litellm_callbacks(self, kwargs) -> dict:
    """Enrich the LiteLLM completion kwargs with callback metadata.

    Captures the current command and PR URL from the Loguru logging
    context, then builds a ``metadata`` dict shaped for whichever
    observability callbacks (langfuse / langsmith) are configured on
    litellm, and attaches it to ``kwargs``.

    :param kwargs: the keyword-argument dict about to be passed to the
        LiteLLM completion call; mutated in place.
    :return: the same ``kwargs`` dict, with ``kwargs["metadata"]`` set.
    """
    captured_extra = []

    def capture_logs(message):
        # Loguru sink: pull command/pr_url out of the log record's
        # bound "extra" context, if present.
        record = message.record
        log_entry = {}
        # NOTE: default must be {} (not None) — 'extra' may be absent,
        # and chaining .get on None raises AttributeError.
        if record.get('extra', {}).get('command', None) is not None:
            log_entry.update({"command": record['extra']["command"]})
        if record.get('extra', {}).get('pr_url', None) is not None:
            log_entry.update({"pr_url": record['extra']["pr_url"]})
        # Append the log entry to the captured_extra list
        captured_extra.append(log_entry)

    # Temporarily add the custom sink to Loguru, emit one log line to
    # trigger it, then remove it again.
    handler_id = get_logger().add(capture_logs)
    get_logger().debug("Capturing logs for litellm callbacks")
    get_logger().remove(handler_id)

    # Default to {} so the .get calls below are safe even when nothing
    # was captured (previously None here caused an AttributeError).
    context = captured_extra[0] if len(captured_extra) > 0 else {}
    command = context.get("command", "unknown")
    pr_url = context.get("pr_url", "unknown")
    git_provider = get_settings().config.git_provider

    metadata = dict()
    callbacks = litellm.success_callback + litellm.failure_callback + litellm.service_callback
    if "langfuse" in callbacks:
        metadata.update({
            "trace_name": command,
            "tags": [git_provider, command],
            "trace_metadata": {
                "command": command,
                "pr_url": pr_url,
            },
        })
    if "langsmith" in callbacks:
        metadata.update({
            "run_name": command,
            "tags": [git_provider, command],
            "extra": {
                "metadata": {
                    "command": command,
                    "pr_url": pr_url,
                }
            },
        })

    # Adding the captured metadata to the kwargs
    kwargs["metadata"] = metadata
    return kwargs

View File

@ -265,6 +265,9 @@ pr_commands = [
# use_client = false # use_client = false
# drop_params = false # drop_params = false
enable_callbacks = false enable_callbacks = false
success_callback = []
failure_callback = []
service_callback = []
[pr_similar_issue] [pr_similar_issue]
skip_comments = false skip_comments = false