refactor(ai_handler): improve streaming response handling robustness

Author: Makonike
Date:   2025-07-09 15:39:15 +08:00
Parent: 85e1e2d4ee
Commit: 31e25a5965


@@ -448,11 +448,13 @@ class LiteLLMAIHandler(BaseAiHandler):
 try:
     async for chunk in response:
         if chunk.choices and len(chunk.choices) > 0:
-            delta = chunk.choices[0].delta
-            if hasattr(delta, 'content') and delta.content:
-                full_response += delta.content
-            if chunk.choices[0].finish_reason:
-                finish_reason = chunk.choices[0].finish_reason
+            choice = chunk.choices[0]
+            delta = choice.delta
+            content = getattr(delta, 'content', None)
+            if content:
+                full_response += content
+            if choice.finish_reason:
+                finish_reason = choice.finish_reason
 except Exception as e:
     get_logger().error(f"Error handling streaming response: {e}")
     raise
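
The change is defensive: the delta content is now read with getattr(delta, 'content', None), which tolerates chunks whose delta has no content attribute, and the repeated chunk.choices[0] indexing is hoisted into a local choice variable. Below is a minimal, self-contained sketch of the same consumption pattern; make_chunk, fake_stream, and collect are hypothetical stand-ins used only for illustration, not code from this repository.

import asyncio
from types import SimpleNamespace


def make_chunk(content=None, finish_reason=None):
    # Hypothetical stand-in for a streaming chunk shaped like
    # chunk.choices[0].delta.content / chunk.choices[0].finish_reason.
    delta = SimpleNamespace(content=content)
    choice = SimpleNamespace(delta=delta, finish_reason=finish_reason)
    return SimpleNamespace(choices=[choice])


async def fake_stream():
    # Simulates a streaming response, including a chunk with no content.
    yield make_chunk("Hello")
    yield make_chunk(None)
    yield make_chunk(", world", finish_reason="stop")


async def collect(response):
    full_response = ""
    finish_reason = None
    async for chunk in response:
        if chunk.choices and len(chunk.choices) > 0:
            choice = chunk.choices[0]
            delta = choice.delta
            # getattr with a default tolerates deltas that lack `content`.
            content = getattr(delta, 'content', None)
            if content:
                full_response += content
            if choice.finish_reason:
                finish_reason = choice.finish_reason
    return full_response, finish_reason


if __name__ == "__main__":
    text, reason = asyncio.run(collect(fake_stream()))
    print(text, reason)  # prints: Hello, world stop

Running the sketch shows the point of the patch: the chunk whose delta carries no content is skipped cleanly instead of raising, while the final finish_reason is still captured.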