forked from phoenix/litellm-mirror
(fix) pre commit hook to sync backup context_window mapping
This commit is contained in:
parent
8b571159fc
commit
70f36073dc
3 changed files with 15 additions and 22 deletions
|
@ -1,39 +1,32 @@
|
||||||
import sys
|
import sys
|
||||||
import filecmp
|
import filecmp
|
||||||
import difflib
|
import shutil
|
||||||
|
|
||||||
|
|
||||||
def show_diff(file1, file2):
    """Print a unified diff of *file1* vs *file2* to stdout.

    Both paths are opened as text; the whole content of each file is read
    into memory, so this is intended for small config/JSON files only.
    """
    with open(file1, "r") as left, open(file2, "r") as right:
        left_lines = left.readlines()
        right_lines = right.readlines()

    # lineterm="" keeps difflib from appending newlines to the hunk headers;
    # content lines retain their own trailing "\n" from readlines().
    for delta in difflib.unified_diff(left_lines, right_lines, lineterm=""):
        print(delta)
||||||
def main(argv=None):
    """Pre-commit check: keep the backup context-window mapping in sync.

    Compares model_prices_and_context_window.json with the backup copy
    under litellm/ and, when they differ, overwrites the backup from the
    source file.

    *argv* is accepted for pre-commit hook compatibility but unused.
    Returns 0 when the files already match, 1 when they differed (the
    backup has then been refreshed, so the hook run fails once and the
    user re-stages the updated backup).
    """
    print(
        "Comparing model_prices_and_context_window and litellm/model_prices_and_context_window_backup.json files... checking if they match."
    )

    source_file = "model_prices_and_context_window.json"
    backup_file = "litellm/model_prices_and_context_window_backup.json"

    # shallow=False forces a byte-for-byte comparison rather than an
    # os.stat()-based one.
    if filecmp.cmp(source_file, backup_file, shallow=False):
        print(f"Passed! Files {source_file} and {backup_file} match.")
        return 0

    print(
        f"Failed! Files {source_file} and {backup_file} do not match. Copying content from {source_file} to {backup_file}."
    )
    copy_content(source_file, backup_file)
    return 1
||||||
def copy_content(source, destination):
    """Overwrite *destination* with the contents of *source*.

    Data and stat metadata (permissions, timestamps) are both copied,
    matching shutil.copy2 semantics. Returns None.
    """
    shutil.copyfile(source, destination)
    shutil.copystat(source, destination)
||||||
if __name__ == "__main__":
    # Script entry point for the pre-commit hook; the return value of
    # main() becomes the process exit status git sees.
    raise SystemExit(main())
|
@ -5,7 +5,7 @@
|
||||||
"max_output_tokens": 4096,
|
"max_output_tokens": 4096,
|
||||||
"input_cost_per_token": 0.00003,
|
"input_cost_per_token": 0.00003,
|
||||||
"output_cost_per_token": 0.00006,
|
"output_cost_per_token": 0.00006,
|
||||||
"litellm_provider": "openai",
|
"litellm_provider": "opeai",
|
||||||
"mode": "chat"
|
"mode": "chat"
|
||||||
},
|
},
|
||||||
"gpt-4-0314": {
|
"gpt-4-0314": {
|
||||||
|
|
|
@ -5,7 +5,7 @@
|
||||||
"max_output_tokens": 4096,
|
"max_output_tokens": 4096,
|
||||||
"input_cost_per_token": 0.00003,
|
"input_cost_per_token": 0.00003,
|
||||||
"output_cost_per_token": 0.00006,
|
"output_cost_per_token": 0.00006,
|
||||||
"litellm_provider": "openai",
|
"litellm_provider": "opeai",
|
||||||
"mode": "chat"
|
"mode": "chat"
|
||||||
},
|
},
|
||||||
"gpt-4-0314": {
|
"gpt-4-0314": {
|
||||||
|
|
Loading…
Add table
Add a link
Reference in a new issue