From c86f8e1eb4ac6ebd2fbb44151ab0ff1a87cef4c8 Mon Sep 17 00:00:00 2001
From: fracapuano
Date: Thu, 25 Jul 2024 19:06:07 +0200
Subject: [PATCH] fix: now supports single tokens prediction

---
 litellm/llms/replicate.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/litellm/llms/replicate.py b/litellm/llms/replicate.py
index 1dd29fd7d..0d129ce02 100644
--- a/litellm/llms/replicate.py
+++ b/litellm/llms/replicate.py
@@ -387,7 +387,7 @@ def process_response(
         result = " "

     ## Building RESPONSE OBJECT
-    if len(result) > 1:
+    if len(result) >= 1:
         model_response.choices[0].message.content = result  # type: ignore

     # Calculate usage
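
Not part of the patch itself: a minimal sketch illustrating why the `> 1` check dropped single-token completions and how `>= 1` keeps them. The `build_content` helper below is a hypothetical stand-in for the relevant slice of `process_response`, not litellm's actual API.

```python
from typing import Optional


def build_content(result: str, strict_gt_one: bool) -> Optional[str]:
    """Return the content that would be set on the response, or None if skipped.

    Hypothetical, simplified stand-in for the length check patched above.
    """
    if len(result) == 0:  # mirror the empty-result edge case just above the hunk
        result = " "
    condition = len(result) > 1 if strict_gt_one else len(result) >= 1
    return result if condition else None


print(build_content("7", strict_gt_one=True))   # None -> single-token result lost (old check)
print(build_content("7", strict_gt_one=False))  # "7"  -> single-token result kept (patched check)
```

Under this reading, the one-character change is enough: any non-empty result, including a single predicted token, now reaches `model_response.choices[0].message.content`.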