with caching

This commit is contained in:
ishaan-jaff 2023-08-10 15:14:58 -07:00
parent e9c2f3b55d
commit a1b64832a7
2 changed files with 7 additions and 7 deletions

View file

@@ -724,29 +724,29 @@ def add_cache(messages, model_response):
global cache_collection
if cache_collection == None:
make_collection()
print("cache collection in add cache", cache_collection)
# print("cache collection in add cache", cache_collection)
user_question = message_to_user_question(messages)
cache_collection.add(
documents=[user_question],
metadatas=[{"model_response": str(model_response)}],
ids = [ str(uuid.uuid4())]
)
print("in add cache, peek()", cache_collection.peek())
# print("in add cache, peek()", cache_collection.peek())
return
def get_cache(messages):
print("in get cache")
# print("in get cache")
try:
global cache_collection
if cache_collection == None:
make_collection()
print("cache collection", cache_collection)
# print("cache collection", cache_collection)
user_question = message_to_user_question(messages)
results = cache_collection.query(
query_texts=[user_question],
n_results=1
)
print("query cache result", results)
# print("query cache result", results)
distance = results['distances'][0][0]
sim = (1 - distance)
if sim >= litellm.cache_similarity_threshold:
@@ -757,5 +757,5 @@ def get_cache(messages):
# no hit
return None
except Exception as e:
print("error in get cache", e)
# print("error in get cache", e)
return None