fix(openai.py): fix client caching logic

Krrish Dholakia 2024-06-01 16:45:34 -07:00
parent 63fb3a95be
commit 93c9ea160d
2 changed files with 4 additions and 1 deletion
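The commit message refers to fixing client caching logic, i.e. reusing an already-constructed OpenAI client for requests that share the same connection parameters instead of building a new one each call. The sketch below is only an illustration of that idea under assumed names (_client_cache, get_cached_client); it is not the actual litellm implementation from this commit.

# Hypothetical sketch of client caching, not litellm's real code.
from typing import Dict, Optional, Tuple

import openai

# Cache of clients keyed by the parameters that affect the connection.
_client_cache: Dict[Tuple, openai.OpenAI] = {}


def get_cached_client(
    api_key: str,
    api_base: Optional[str] = None,
    timeout: float = 600.0,
    max_retries: int = 2,
) -> openai.OpenAI:
    """Return a cached client for these parameters, creating one if needed."""
    cache_key = (api_key, api_base, timeout, max_retries)
    if cache_key not in _client_cache:
        _client_cache[cache_key] = openai.OpenAI(
            api_key=api_key,
            base_url=api_base,
            timeout=timeout,
            max_retries=max_retries,
        )
    return _client_cache[cache_key]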


@@ -14,6 +14,7 @@ from functools import partial
import dotenv, traceback, random, asyncio, time, contextvars
from copy import deepcopy
import httpx
import litellm
from ._logging import verbose_logger
from litellm import ( # type: ignore