forked from phoenix/litellm-mirror
moved script to the workflow folder
This commit is contained in:
parent
ac044c4bc6
commit
874847099a
2 changed files with 1 additions and 1 deletions
|
@@ -15,7 +15,7 @@ jobs:
          pip install aiohttp
      - name: Update JSON Data
        run: |
-         python ci_cd/auto_update_price_and_context_window_file.py
+         python ".github/workflows/auto_update_price_and_context_window_file.py"
      - name: Create Pull Request
        run: |
          git add model_prices_and_context_window.json
|
|
108 .github/workflows/auto_update_price_and_context_window_file.py (vendored, new file)
|
@ -0,0 +1,108 @@
|
|||
import asyncio
|
||||
import aiohttp
|
||||
import json
|
||||
|
||||
# Asynchronously fetch data from a given URL
async def fetch_data(url):
    """Fetch *url* and return the ``data`` field of its JSON body.

    Returns None if the request, JSON parsing, or key lookup fails;
    failures are reported to stdout rather than raised (best-effort fetch).
    """
    try:
        # One throwaway session per call; closed automatically on exit
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                # Turn non-2xx responses into exceptions
                resp.raise_for_status()
                payload = await resp.json()
        print("Fetch the data from URL.")
        # KeyError here is also swallowed by the handler below
        return payload['data']
    except Exception as e:
        # Best-effort: log and signal failure via None
        print("Error fetching data from URL:", e)
        return None
|
||||
|
||||
# Synchronize local data with remote data
def sync_local_data_with_remote(local_data, remote_data):
    """Merge *remote_data* into *local_data* in place.

    Keys present in both dicts have their (dict) values updated with the
    remote fields; keys only in *remote_data* are added to *local_data*.
    Returns None (in-place mutation).
    """
    for model_key, remote_entry in remote_data.items():
        if model_key in local_data:
            # Existing entry: overlay the remote fields onto the local ones
            local_data[model_key].update(remote_entry)
        else:
            # New entry: adopt the remote record as-is
            local_data[model_key] = remote_entry
|
||||
|
||||
# Write data to the json file
def write_to_file(file_path, data):
    """Serialize *data* as 4-space-indented JSON to *file_path*.

    Errors are reported to stdout instead of raised (best-effort write);
    a success message is printed when the dump completes.
    """
    try:
        with open(file_path, "w") as out:
            json.dump(data, out, indent=4)
        print("Values updated successfully.")
    except Exception as e:
        # Best-effort: log and continue rather than crash the updater
        print("Error updating JSON file:", e)
|
||||
|
||||
# Update the existing models and add the missing models
def transform_remote_data(data):
    """Convert a list of OpenRouter model rows into litellm price-map entries.

    Each row must carry ``id``, ``context_length`` and a ``pricing`` dict with
    ``prompt``/``completion`` string prices. Optional ``pricing.image`` adds
    ``input_cost_per_image``; an ``architecture.modality`` of ``multimodal``
    sets ``supports_vision``. Returns a dict keyed by ``openrouter/<id>``.
    """
    transformed = {}
    for row in data:
        pricing = row["pricing"]
        entry = {
            "max_tokens": row["context_length"],
            # Prices arrive as strings; normalize to floats
            "input_cost_per_token": float(pricing["prompt"]),
            "output_cost_per_token": float(pricing["completion"]),
            "litellm_provider": "openrouter",
            "mode": "chat",
        }

        # Optional per-image price
        if "image" in pricing:
            entry["input_cost_per_image"] = float(pricing["image"])

        # Flag vision support for multimodal models
        if row.get("architecture", {}).get("modality") == "multimodal":
            entry["supports_vision"] = True

        # Composite key namespaces the model under the openrouter provider
        transformed[f'openrouter/{row["id"]}'] = entry

    return transformed
|
||||
|
||||
# Load local data from a specified file
def load_local_data(file_path):
    """Return the parsed JSON content of *file_path*, or None on failure.

    A missing file or malformed JSON is reported to stdout and yields None
    instead of raising.
    """
    try:
        with open(file_path, "r") as src:
            return json.load(src)
    except FileNotFoundError:
        print("File not found:", file_path)
    except json.JSONDecodeError as e:
        print("Error decoding JSON:", e)
    # Both failure paths fall through to a None result
    return None
|
||||
|
||||
def main():
    """Refresh the local model-price JSON from the OpenRouter model list.

    Loads the local price file, fetches the remote model catalogue,
    transforms it into litellm's schema, merges it into the local data,
    and writes the result back. Prints a failure message if either side
    is unavailable.
    """
    local_file_path = "model_prices_and_context_window.json"  # Path to the local data file
    url = "https://openrouter.ai/api/v1/models"  # URL to fetch remote data

    # Load local data from file (None on failure)
    local_data = load_local_data(local_file_path)
    # Fetch remote data asynchronously (None on failure)
    remote_data = asyncio.run(fetch_data(url))

    # Bug fix: only transform when the fetch succeeded — the original called
    # transform_remote_data(None) after a failed fetch, raising TypeError
    # before the availability check below could run.
    if remote_data is not None:
        remote_data = transform_remote_data(remote_data)

    # If both local and remote data are available, synchronize and save
    if local_data and remote_data:
        sync_local_data_with_remote(local_data, remote_data)
        write_to_file(local_file_path, local_data)
    else:
        print("Failed to fetch model data from either local file or URL.")
|
||||
|
||||
# Entry point of the script: run the sync only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|
Loading…
Add table
Add a link
Reference in a new issue