forked from phoenix/litellm-mirror
Merge pull request #3630 from paneru-rajan/Issue-2407-auto-update-pricing
Added a script for auto-updating the pricing with the prices from openrouter.ai
This commit is contained in:
commit
e6b9945ad9
2 changed files with 149 additions and 0 deletions
28
.github/workflows/auto_update_price_and_context_window.yml
vendored
Normal file
28
.github/workflows/auto_update_price_and_context_window.yml
vendored
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
# Weekly job: refresh model_prices_and_context_window.json from openrouter.ai
# and open a pull request with the result.
name: Updates model_prices_and_context_window.json and Create Pull Request

on:
  schedule:
    - cron: "0 0 * * 0" # Run every Sunday at midnight
  # Allow manual runs for testing the workflow without waiting for Sunday.
  workflow_dispatch:

jobs:
  auto_update_price_and_context_window:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3

      - name: Install Dependencies
        run: |
          pip install aiohttp

      - name: Update JSON Data
        run: |
          python ".github/workflows/auto_update_price_and_context_window_file.py"

      - name: Create Pull Request
        run: |
          # Fix: commit on a dated branch and push it before opening the PR.
          # Previously no branch was created or pushed and no git identity was
          # configured, so `git commit` failed and `gh pr create --head` pointed
          # at a branch that did not exist on the remote.
          BRANCH="auto-update-price-and-context-window-$(date +'%Y-%m-%d')"
          git config user.name "github-actions[bot]"
          git config user.email "github-actions[bot]@users.noreply.github.com"
          git checkout -b "$BRANCH"
          git add model_prices_and_context_window.json
          git commit -m "Update model_prices_and_context_window.json file: $(date +'%Y-%m-%d')"
          git push origin "$BRANCH"
          gh pr create --title "Update model_prices_and_context_window.json file" \
            --body "Automated update for model_prices_and_context_window.json" \
            --head "$BRANCH" \
            --base main
        env:
          GH_TOKEN: ${{ secrets.GH_TOKEN }}
|
121
.github/workflows/auto_update_price_and_context_window_file.py
vendored
Normal file
121
.github/workflows/auto_update_price_and_context_window_file.py
vendored
Normal file
|
@ -0,0 +1,121 @@
|
||||||
|
import asyncio
|
||||||
|
import aiohttp
|
||||||
|
import json
|
||||||
|
|
||||||
|
# Asynchronously fetch data from a given URL
|
||||||
|
async def fetch_data(url):
    """Fetch *url* and return the ``data`` field of its JSON body.

    Any failure (network error, non-2xx status, bad JSON, missing key)
    is printed and swallowed; the caller receives ``None`` instead.
    """
    try:
        async with aiohttp.ClientSession() as session:
            async with session.get(url) as resp:
                # Turn non-2xx statuses into exceptions handled below.
                resp.raise_for_status()
                payload = await resp.json()
                print("Fetch the data from URL.")
                return payload['data']
    except Exception as e:
        # Best-effort fetch: report and signal failure with None.
        print("Error fetching data from URL:", e)
        return None
|
||||||
|
|
||||||
|
# Synchronize local data with remote data
|
||||||
|
def sync_local_data_with_remote(local_data, remote_data):
    """Merge *remote_data* into *local_data* in place.

    Keys present in both dicts get their local entry updated with the
    remote fields; keys only present remotely are copied over wholesale.
    Returns None (stdlib mutator convention).
    """
    for model_key, remote_entry in remote_data.items():
        if model_key in local_data:
            # Existing model: overlay the remote fields onto the local entry.
            local_data[model_key].update(remote_entry)
        else:
            # New model: adopt the remote entry as-is.
            local_data[model_key] = remote_entry
|
||||||
|
|
||||||
|
# Write data to the json file
|
||||||
|
def write_to_file(file_path, data):
    """Serialize *data* as indented JSON to *file_path*.

    Prints a success message when done; any failure is printed and
    swallowed rather than raised to the caller.
    """
    try:
        with open(file_path, "w") as out:
            json.dump(data, out, indent=4)
            print("Values updated successfully.")
    except Exception as e:
        # Best-effort write: report the problem and carry on.
        print("Error updating JSON file:", e)
|
||||||
|
|
||||||
|
# Update the existing models and add the missing models
|
||||||
|
def transform_remote_data(data):
    """Convert openrouter.ai model rows into litellm-style pricing entries.

    Each row becomes a dict keyed ``openrouter/<model id>`` holding the
    context length, per-token prompt/completion costs, optional output
    cap and image cost, provider metadata, and a vision flag for
    multimodal models. Field insertion order mirrors the original
    implementation so the serialized JSON is unchanged.
    """
    transformed = {}
    for row in data:
        pricing = row["pricing"]
        entry = {
            "max_tokens": row["context_length"],
            "input_cost_per_token": float(pricing["prompt"]),
        }

        # Record an output cap only when the top provider reports one.
        if "top_provider" in row and "max_completion_tokens" in row["top_provider"] and row["top_provider"]["max_completion_tokens"] is not None:
            entry["max_output_tokens"] = int(row["top_provider"]["max_completion_tokens"])

        entry["output_cost_per_token"] = float(pricing["completion"])

        # Image pricing is optional and only meaningful when non-zero.
        if "image" in pricing and float(pricing["image"]) != 0.0:
            entry["input_cost_per_image"] = float(pricing["image"])

        entry["litellm_provider"] = "openrouter"
        entry["mode"] = "chat"

        # Multimodal models accept image input.
        if row.get('architecture', {}).get('modality') == 'multimodal':
            entry['supports_vision'] = True

        # Composite key namespaces the model id under the provider.
        transformed[f'openrouter/{row["id"]}'] = entry

    return transformed
|
||||||
|
|
||||||
|
|
||||||
|
# Load local data from a specified file
|
||||||
|
def load_local_data(file_path):
    """Read *file_path* and return its parsed JSON content.

    Returns ``None`` (after printing a diagnostic) when the file is
    absent or does not contain valid JSON.
    """
    # Read and parse in two narrow steps so each failure mode is
    # handled by exactly one except clause.
    try:
        with open(file_path, "r") as src:
            raw = src.read()
    except FileNotFoundError:
        print("File not found:", file_path)
        return None
    try:
        return json.loads(raw)
    except json.JSONDecodeError as e:
        print("Error decoding JSON:", e)
        return None
|
||||||
|
|
||||||
|
def main():
    """Refresh the local pricing file with data fetched from openrouter.ai.

    Loads the local JSON file, fetches the remote model list, merges the
    transformed remote entries into the local data, and writes the result
    back. Prints a failure message if either source is unavailable.
    """
    local_file_path = "model_prices_and_context_window.json"  # Path to the local data file
    url = "https://openrouter.ai/api/v1/models"  # URL to fetch remote data

    # Load local data from file.
    local_data = load_local_data(local_file_path)
    # Fetch remote data asynchronously.
    remote_data = asyncio.run(fetch_data(url))
    # Bug fix: fetch_data returns None on failure, and transform_remote_data
    # previously iterated it unconditionally — the script crashed with a
    # TypeError before the "failed to fetch" message below could be printed.
    if remote_data is not None:
        remote_data = transform_remote_data(remote_data)

    # If both local and remote data are available, synchronize and save.
    if local_data and remote_data:
        sync_local_data_with_remote(local_data, remote_data)
        write_to_file(local_file_path, local_data)
    else:
        print("Failed to fetch model data from either local file or URL.")


# Entry point of the script
if __name__ == "__main__":
    main()
|
Loading…
Add table
Add a link
Reference in a new issue