From 22c16f586bf7f732d42e2ba69c9dd8810430bb66 Mon Sep 17 00:00:00 2001
From: dcieslak19973
Date: Sun, 22 Jun 2025 11:05:08 -0500
Subject: [PATCH] Add bedrock Llama 4 Scout/Maverick

---
 docs/docs/usage-guide/changing_a_model.md | 8 ++++++++
 pr_agent/algo/__init__.py                 | 2 ++
 2 files changed, 10 insertions(+)

diff --git a/docs/docs/usage-guide/changing_a_model.md b/docs/docs/usage-guide/changing_a_model.md
index 46a72886..9648e6cf 100644
--- a/docs/docs/usage-guide/changing_a_model.md
+++ b/docs/docs/usage-guide/changing_a_model.md
@@ -232,6 +232,14 @@ AWS_SECRET_ACCESS_KEY="..."
 AWS_REGION_NAME="..."
 ```
 
+You can also use the new Meta Llama 4 models available on Amazon Bedrock:
+
+```toml
+[config] # in configuration.toml
+model="bedrock/us.meta.llama4-scout-17b-instruct-v1:0"
+fallback_models=["bedrock/us.meta.llama4-maverick-17b-instruct-v1:0"]
+```
+
 See [litellm](https://docs.litellm.ai/docs/providers/bedrock#usage) documentation for more information about the environment variables required for Amazon Bedrock.
 
 ### DeepSeek
diff --git a/pr_agent/algo/__init__.py b/pr_agent/algo/__init__.py
index d6f45de7..475e5496 100644
--- a/pr_agent/algo/__init__.py
+++ b/pr_agent/algo/__init__.py
@@ -109,6 +109,8 @@ MAX_TOKENS = {
     'claude-3-5-sonnet': 100000,
     'groq/meta-llama/llama-4-scout-17b-16e-instruct': 131072,
     'groq/meta-llama/llama-4-maverick-17b-128e-instruct': 131072,
+    'bedrock/us.meta.llama4-scout-17b-instruct-v1:0': 3500000,
+    'bedrock/us.meta.llama4-maverick-17b-instruct-v1:0': 1000000,
     'groq/llama3-8b-8192': 8192,
     'groq/llama3-70b-8192': 8192,
     'groq/llama-3.1-8b-instant': 8192,
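
Not part of the patch: a minimal smoke-test sketch for reviewers who want to confirm that the new Bedrock model identifiers resolve through litellm, the library PR-Agent delegates model calls to. It assumes AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and AWS_REGION_NAME are exported as described in the documentation section being edited above; the prompt and response handling are illustrative only, not PR-Agent code.

```python
# Hypothetical standalone check, not part of this PR: send one short request to the
# Bedrock-hosted Llama 4 Scout model via litellm, using the same model identifier
# string added to configuration.toml and MAX_TOKENS in the patch.
# Requires AWS_ACCESS_KEY_ID, AWS_SECRET_ACCESS_KEY, and AWS_REGION_NAME in the environment.
import litellm

response = litellm.completion(
    model="bedrock/us.meta.llama4-scout-17b-instruct-v1:0",
    messages=[{"role": "user", "content": "Reply with the single word: ok"}],
    max_tokens=10,  # keep the smoke test cheap
)
print(response.choices[0].message.content)
```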