Mirror of https://github.com/BerriAI/litellm.git (synced 2025-04-27 19:54:13 +00:00)
fix - increase default penalty for lowest latency
This commit is contained in:
parent a86de641a5
commit a26ecbad97

1 changed file with 1 addition and 1 deletion
@@ -169,7 +169,7 @@ class LowestLatencyLoggingHandler(CustomLogger):
                    request_count_dict[id] = {}

                    ## Latency
-                   request_count_dict[id].setdefault("latency", []).append(100.0)
+                   request_count_dict[id].setdefault("latency", []).append(1000.0)
                    self.router_cache.set_cache(
                        key=latency_key,
                        value=request_count_dict,
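The value being changed is the placeholder latency recorded for a request that failed or timed out: a larger value pushes that deployment's average latency up further, so the lowest-latency routing strategy avoids it for longer. The sketch below (not litellm's actual code; the deployment names, PENALTY_ON_TIMEOUT constant, and helper functions are illustrative assumptions) shows why the size of this penalty matters when selection is based on mean recorded latency.

from statistics import mean

PENALTY_ON_TIMEOUT = 1000.0  # seconds; mirrors the new default in the diff above

# hypothetical per-deployment latency history (seconds per request)
request_count_dict = {
    "deployment-a": {"latency": [0.8, 1.1, 0.9]},
    "deployment-b": {"latency": [0.7, 0.6]},
}

def record_timeout(deployment_id: str) -> None:
    """Append the penalty so the deployment's average latency spikes."""
    entry = request_count_dict.setdefault(deployment_id, {})
    entry.setdefault("latency", []).append(PENALTY_ON_TIMEOUT)

def pick_lowest_latency() -> str:
    """Return the deployment with the smallest mean recorded latency."""
    return min(
        request_count_dict,
        key=lambda dep: mean(request_count_dict[dep]["latency"]),
    )

record_timeout("deployment-b")   # deployment-b just timed out
print(pick_lowest_latency())     # -> "deployment-a"

With the old 100.0-second penalty, a deployment with otherwise fast responses could still look competitive after a timeout; the larger default makes a timed-out deployment clearly the worst choice until fresh, healthy latency samples dilute the penalty.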