From 5d0f9fd7498a0d8c50358bd88fbcad8e257793a9 Mon Sep 17 00:00:00 2001
From: ishaan-jaff
Date: Fri, 11 Aug 2023 13:43:41 -0700
Subject: [PATCH] new v litellm for render

---
 .DS_Store                              | Bin 6148 -> 8196 bytes
 .circleci/config.yml                   |   9 +--------
 cookbook/proxy-server/readme.md        |  15 ++++++++++++++-
 cookbook/proxy-server/requirements.txt |   2 +-
 4 files changed, 16 insertions(+), 10 deletions(-)

diff --git a/.DS_Store b/.DS_Store
index 22073074832468582ce431f5bcd4c2f80be08229..5819da711ed4e7e5053d6fb809bdcddfa3f56a49 100644
GIT binary patch
delta 517
zcmaJ;O-sW-5S^H)OS5Q2DZLabc+?PWJ=Y2zdQ$KQ2uX_6u1(B`(o;S3=AndK#fv|{
zLoWUVFa92n`X&&>6x=Y`+1>Z%z1^AH^OGhKX{U*INW`f?@LV_3yGCm5ZrAx7Ya(x;
zeAiLFlW&QJk+;P{e_a;R
nqsZ~I?-AcCng2J4=JV)cnf!B_PD}&s<1Y|nj%7=Z@_lJxAyS!mnDnGdMO_(Q)|av
z3FHtKV>&CWUGtsgyJ=@$FWam-{^ppEc`IUUue*l^?
Bfe!!x

delta 128
zcmZp1XfcprU|?W$DortDU=RQ@Ie-{Mvv5r;6q~50C<+o_1c`a(TPkdX{b5aU5MGHj0L
HnZpbKU7Zx@

diff --git a/.circleci/config.yml b/.circleci/config.yml
index 421460456..47339bfb8 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -73,14 +73,7 @@ workflows:
   version: 2
   build_and_test:
     jobs:
-      - local_testing:
-          filters:
-            paths:
-              ignore:
-                - "README.md"
-                - "docs"
-                - "cookbook"
-
+      - local_testing
       - publish_to_pypi:
           requires:
             - local_testing
diff --git a/cookbook/proxy-server/readme.md b/cookbook/proxy-server/readme.md
index 37087a7a6..7866b7b63 100644
--- a/cookbook/proxy-server/readme.md
+++ b/cookbook/proxy-server/readme.md
@@ -1,17 +1,26 @@
+<<<<<<< HEAD
 # Proxy Server for Azure, Llama2, OpenAI, Claude, Hugging Face, Replicate Models
 [![PyPI Version](https://img.shields.io/pypi/v/litellm.svg)](https://pypi.org/project/litellm/)
 [![PyPI Version](https://img.shields.io/badge/stable%20version-v0.1.345-blue?color=green&link=https://pypi.org/project/litellm/0.1.1/)](https://pypi.org/project/litellm/0.1.1/)
 ![Downloads](https://img.shields.io/pypi/dm/litellm)
 [![litellm](https://img.shields.io/badge/%20%F0%9F%9A%85%20liteLLM-OpenAI%7CAzure%7CAnthropic%7CPalm%7CCohere%7CReplicate%7CHugging%20Face-blue?color=green)](https://github.com/BerriAI/litellm)
+=======
+# Proxy Server for Chat API
+>>>>>>> d1ff082 (new v litellm for render)

-[![](https://dcbadge.vercel.app/api/server/wuPM9dRgDw)](https://discord.gg/wuPM9dRgDw)
+This repository contains a proxy server that interacts with OpenAI's Chat API and other similar APIs to facilitate chat-based language models. The server allows you to easily integrate chat completion capabilities into your applications. The server is built using Python and the Flask framework.

+<<<<<<< HEAD
 # Proxy Server for Chat API

 This repository contains a proxy server that interacts with OpenAI's Chat API and other similar APIs to facilitate chat-based language models. The server allows you to easily integrate chat completion capabilities into your applications. The server is built using Python and the Flask framework.

 ## Installation
+=======
+## Installation
+
+>>>>>>> d1ff082 (new v litellm for render)
 To set up and run the proxy server locally, follow these steps:

 1. Clone this repository to your local machine:
@@ -90,4 +99,8 @@ google/palm-2-chat-bison
 Vertex Models:
 chat-bison
 chat-bison@001
+<<<<<<< HEAD
 Refer to the model endpoint compatibility table for more details.
+=======
+Refer to the model endpoint compatibility table for more details. 
+>>>>>>> d1ff082 (new v litellm for render)
diff --git a/cookbook/proxy-server/requirements.txt b/cookbook/proxy-server/requirements.txt
index 4bfb4f1f9..80c1addca 100644
--- a/cookbook/proxy-server/requirements.txt
+++ b/cookbook/proxy-server/requirements.txt
@@ -1,4 +1,4 @@
 flask
 flask_cors
 waitress
-litellm
\ No newline at end of file
+litellm==0.1.381
\ No newline at end of file
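
Usage note: the above is a standard `git format-patch` mbox, so it can be applied with `git am`. A minimal sketch, assuming the patch is saved as `0001-new-v-litellm-for-render.patch` (the filename `git format-patch` would derive from the subject line; any name works):

```bash
# Apply the patch on a fresh branch of a local clone of the repository.
# The patch filename below is an assumption; point git am at wherever
# the mbox above was actually saved.
git checkout -b new-v-litellm-for-render
git am 0001-new-v-litellm-for-render.patch

# Install the proxy server's dependencies, now pinned to litellm 0.1.381.
pip install -r cookbook/proxy-server/requirements.txt
```

Note that the readme.md hunks commit unresolved merge-conflict markers (`<<<<<<< HEAD` / `=======` / `>>>>>>> d1ff082`), so that file will need a manual cleanup pass after the patch is applied.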