From 5553f84d511fc352dc95cbf49ad752eefbfeefa5 Mon Sep 17 00:00:00 2001
From: fracapuano
Date: Thu, 25 Jul 2024 19:06:07 +0200
Subject: [PATCH] fix: now supports single tokens prediction

---
 litellm/llms/replicate.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/litellm/llms/replicate.py b/litellm/llms/replicate.py
index 1dd29fd7d6..0d129ce028 100644
--- a/litellm/llms/replicate.py
+++ b/litellm/llms/replicate.py
@@ -387,7 +387,7 @@ def process_response(
         result = " "
 
     ## Building RESPONSE OBJECT
-    if len(result) > 1:
+    if len(result) >= 1:
         model_response.choices[0].message.content = result  # type: ignore
 
     # Calculate usage
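
Note on the one-character change: with the old guard, a completion that joins to a single-character string fails the `len(result) > 1` check, so the message content is never set on the response. The patched `>= 1` guard keeps such single-token results. A minimal, self-contained sketch of that behavior follows (this is not the actual litellm process_response code; it assumes `result` is the joined completion string):

    result = "7"  # a single-token, single-character completion

    # Old guard: a one-character result is silently dropped.
    content_old = result if len(result) > 1 else None

    # Patched guard: one-character results are kept.
    content_new = result if len(result) >= 1 else None

    print(content_old, content_new)  # None 7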