code cleanup

This commit is contained in:
Krrish Dholakia 2023-08-10 17:57:57 -07:00
parent 695d777ee5
commit 85753c0d18
4 changed files with 1 additions and 116 deletions

View file

@ -702,60 +702,3 @@ class CustomStreamWrapper:
completion_obj["content"] = chunk.text
# return this for all models
return {"choices": [{"delta": completion_obj}]}
############# Caching Implementation v0 using chromaDB ############################
# Module-level handle to the chromaDB collection holding cached LLM responses.
# Starts as None and is lazily initialized by make_collection() on first use.
cache_collection = None
def make_collection():
    """Create the global chromaDB collection used to cache LLM responses."""
    global cache_collection
    import chromadb
    chroma_client = chromadb.Client()
    cache_collection = chroma_client.create_collection("llm_responses")
def message_to_user_question(messages):
    """Concatenate the content of every message with role 'user' into one string.

    Messages with any other role are ignored; an empty list yields "".
    """
    return "".join(
        message["content"] for message in messages if message["role"] == "user"
    )
def add_cache(messages, model_response):
    """Store a model response in the chromaDB cache, keyed by the user question.

    The concatenated user-message content is added as the document (so later
    lookups match by embedding similarity), and the stringified model response
    is stored in the entry's metadata so get_cache() can recover it on a hit.

    Args:
        messages: list of chat message dicts with "role" and "content" keys.
        model_response: the response object to cache; stored via str().
    """
    global cache_collection
    # Lazily create the collection on first use (PEP 8: compare to None with `is`).
    if cache_collection is None:
        make_collection()
    user_question = message_to_user_question(messages)
    cache_collection.add(
        documents=[user_question],
        metadatas=[{"model_response": str(model_response)}],
        ids=[str(uuid.uuid4())],
    )
def get_cache(messages):
    """Look up a cached model response for the given messages.

    Queries the chromaDB collection with the concatenated user question and,
    when the best match's similarity (computed as 1 - distance) meets
    litellm.cache_similarity_threshold, returns the stored metadata dict
    (which contains the "model_response" string). Returns None on a miss.

    Cache lookups are best-effort and must never raise, so any exception
    (empty collection, missing keys, chromadb errors) is treated as a miss.
    """
    try:
        global cache_collection
        # Lazily create the collection on first use (PEP 8: compare to None with `is`).
        if cache_collection is None:
            make_collection()
        user_question = message_to_user_question(messages)
        results = cache_collection.query(
            query_texts=[user_question],
            n_results=1,
        )
        # NOTE(review): assumes the chroma distance maps to similarity as
        # 1 - distance — confirm this matches the collection's distance metric.
        distance = results['distances'][0][0]
        similarity = 1 - distance
        if similarity >= litellm.cache_similarity_threshold:
            print("got cache hit!")
            return dict(results['metadatas'][0][0])
        # Best match not similar enough: cache miss.
        return None
    except Exception:
        # Best-effort cache: swallow errors and report a miss.
        return None