Merge pull request #836 from gregoryboue/main

feat: allows ollama usage
Tal
2024-04-02 16:30:54 +03:00
committed by GitHub


@@ -61,6 +61,9 @@ class LiteLLMAIHandler(BaseAiHandler):
if get_settings().get("HUGGINGFACE.API_BASE", None) and 'huggingface' in get_settings().config.model: if get_settings().get("HUGGINGFACE.API_BASE", None) and 'huggingface' in get_settings().config.model:
litellm.api_base = get_settings().huggingface.api_base litellm.api_base = get_settings().huggingface.api_base
self.api_base = get_settings().huggingface.api_base self.api_base = get_settings().huggingface.api_base
if get_settings().get("OLLAMA.API_BASE", None) :
litellm.api_base = get_settings().ollama.api_base
self.api_base = get_settings().ollama.api_base
if get_settings().get("HUGGINGFACE.REPITITION_PENALTY", None): if get_settings().get("HUGGINGFACE.REPITITION_PENALTY", None):
self.repetition_penalty = float(get_settings().huggingface.repetition_penalty) self.repetition_penalty = float(get_settings().huggingface.repetition_penalty)
if get_settings().get("VERTEXAI.VERTEX_PROJECT", None): if get_settings().get("VERTEXAI.VERTEX_PROJECT", None):
@@ -150,4 +153,4 @@ class LiteLLMAIHandler(BaseAiHandler):
         if get_settings().config.verbosity_level >= 2:
             get_logger().info(f"\nAI response:\n{resp}")
         return resp, finish_reason
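
For reference, a minimal sketch of what the added branch does at runtime; the local endpoint URL and model name below are illustrative assumptions, not values taken from the PR:

# Minimal sketch, assuming OLLAMA.API_BASE and CONFIG.MODEL were set in the
# pr-agent configuration, e.g. something like:
#   [ollama]
#   api_base = "http://localhost:11434"   # hypothetical local Ollama server
#   [config]
#   model = "ollama/llama2"               # hypothetical model name
import litellm
from pr_agent.config_loader import get_settings

# Same logic as the added branch in LiteLLMAIHandler.__init__: when an Ollama
# API base is configured, litellm requests are routed to that base URL.
if get_settings().get("OLLAMA.API_BASE", None):
    litellm.api_base = get_settings().ollama.api_base

Since litellm dispatches model names prefixed with "ollama/" to its Ollama provider, only the base URL needs to be overridden here; the rest of the handler is unchanged.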