Merge pull request #1921 from abhinav-1305/add-flex-processing

feat: support OpenAI Flex Processing via [litellm] extra_body config
Committed by Tal on 2025-07-12 19:51:31 +03:00 (committed by GitHub)
3 changed files with 48 additions and 0 deletions


@@ -16,6 +16,10 @@ key = "" # Acquire through https://platform.openai.com
#deployment_id = "" # The deployment name you chose when you deployed the engine
#fallback_deployments = [] # For each fallback model specified in configuration.toml in the [config] section, specify the appropriate deployment_id
# OpenAI Flex Processing (optional, for cost savings)
# [litellm]
# extra_body='{"processing_mode": "flex"}'
[pinecone]
api_key = "..."
environment = "gcp-starter"
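Below is a minimal sketch (not part of the PR's diff) of how an `extra_body` value like the one in the commented-out `[litellm]` section above could be parsed and forwarded through litellm. The `processing_mode` key simply mirrors the config snippet, and the model name and prompt are assumptions for illustration only.

```python
# Minimal sketch: read an extra_body string like the [litellm] example above,
# parse it as JSON, and pass it to litellm so it is merged into the request body.
# "processing_mode" mirrors the config snippet; the model name is assumed.
import json

import litellm

extra_body_raw = '{"processing_mode": "flex"}'  # value as stored in the secrets file
extra_body = json.loads(extra_body_raw)          # -> {"processing_mode": "flex"}

response = litellm.completion(
    model="gpt-4o",  # assumed model for illustration
    messages=[{"role": "user", "content": "Summarize this pull request."}],
    extra_body=extra_body,  # forwarded into the provider request body
)
print(response.choices[0].message.content)
```

In this sketch the only user-visible change, as in the PR, is the optional `[litellm]` block in the secrets template; when it is left commented out, no `extra_body` is sent and requests behave as before.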