From e89b65ed38d8c3b2cb016fdc4e0963e83f48182b Mon Sep 17 00:00:00 2001
From: dceoy
Date: Mon, 30 Dec 2024 11:35:02 +0900
Subject: [PATCH] Increase the max tokens for groq/llama-3.3-70b-versatile

---
 pr_agent/algo/__init__.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/pr_agent/algo/__init__.py b/pr_agent/algo/__init__.py
index b092077a..72eb7912 100644
--- a/pr_agent/algo/__init__.py
+++ b/pr_agent/algo/__init__.py
@@ -67,7 +67,7 @@ MAX_TOKENS = {
     'groq/llama3-8b-8192': 8192,
     'groq/llama3-70b-8192': 8192,
     'groq/llama-3.1-8b-instant': 8192,
-    'groq/llama-3.3-70b-versatile': 8192,
+    'groq/llama-3.3-70b-versatile': 128000,
     'groq/mixtral-8x7b-32768': 32768,
     'groq/gemma2-9b-it': 8192,
     'ollama/llama3': 4096,
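Note: the MAX_TOKENS dictionary maps model identifiers to their context-window size so callers can budget prompt and completion tokens per model. The sketch below is a minimal illustration of how such a lookup is typically consumed; the helper name get_max_tokens and the DEFAULT_MAX_TOKENS fallback are assumptions for this example, not pr_agent's verified call path.

```python
# Illustrative only: how a MAX_TOKENS-style table is commonly used to pick a
# per-model token budget. Only the two entries relevant to this patch are shown.
MAX_TOKENS = {
    'groq/llama-3.1-8b-instant': 8192,
    'groq/llama-3.3-70b-versatile': 128000,  # value raised by this patch
}

DEFAULT_MAX_TOKENS = 8192  # assumed conservative fallback for unknown models


def get_max_tokens(model: str) -> int:
    """Return the context-window budget for a model, falling back to a safe default."""
    return MAX_TOKENS.get(model, DEFAULT_MAX_TOKENS)


if __name__ == "__main__":
    # After this patch, prompts for the 70B versatile model can be budgeted
    # against a 128k-token window instead of being clipped at 8k.
    print(get_max_tokens('groq/llama-3.3-70b-versatile'))  # 128000
    print(get_max_tokens('groq/unknown-model'))            # 8192 (fallback)
```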