Mirror of https://github.com/qodo-ai/pr-agent.git, synced 2025-07-21 04:50:39 +08:00
Throw descriptive error message if model is not in MAX_TOKENS array
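The diff below touches only Usage.md; the runtime check named in the title is not part of this documentation change. As a rough illustration, such a check might look like the following sketch, where `MAX_TOKENS` mirrors the dictionary shown in the docs and `get_max_tokens` is a hypothetical helper name, not a confirmed part of the codebase:

```python
# Illustrative sketch only: MAX_TOKENS mirrors the docs below; get_max_tokens
# is a hypothetical helper name, not necessarily the project's actual function.
MAX_TOKENS = {
    # ...,
    "ollama/llama2": 4096,
}

def get_max_tokens(model: str) -> int:
    """Return the context window for `model`, raising a descriptive error if it is unknown."""
    if model not in MAX_TOKENS:
        raise ValueError(
            f"Model '{model}' is not listed in MAX_TOKENS. "
            f"Add it to the MAX_TOKENS dictionary as described in Usage.md."
        )
    return MAX_TOKENS[model]
```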
Usage.md (4 changed lines)
@@ -262,7 +262,7 @@ MAX_TOKENS = {
 e.g.
 MAX_TOKENS={
     ...,
-    "llama2": 4096
+    "ollama/llama2": 4096
 }
@@ -271,6 +271,8 @@ model = "ollama/llama2"
 
 [ollama] # in .secrets.toml
 api_base = ... # the base url for your huggingface inference endpoint
+# e.g. if running Ollama locally, you may use:
+api_base = "http://localhost:11434/"
 ```
 
 **Inference Endpoints**
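As a hedged usage note on the added `api_base` lines: pr-agent's AI handler is built on LiteLLM, so a locally running Ollama server would typically be reached with a call along these lines. The exact wiring inside pr-agent is not shown in this diff, and the prompt content here is purely illustrative.

```python
# Assumes the litellm package is installed and an Ollama server is hosting
# llama2 at the api_base shown in the documentation above.
import litellm

response = litellm.completion(
    model="ollama/llama2",               # matches the MAX_TOKENS key added above
    api_base="http://localhost:11434/",  # local Ollama endpoint from the docs
    messages=[{"role": "user", "content": "Summarize this pull request."}],
)
print(response.choices[0].message.content)
```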