forked from phoenix/litellm-mirror
feat(index.ts): initial commit for proxy edge worker
testing to see if a js worker improves proxy perf (and by how much)
parent d259c754ef
commit 6c34e48180
5 changed files with 115 additions and 0 deletions
litellm-js/proxy/src/index.ts (new file, 59 lines)
@@ -0,0 +1,59 @@
import { Hono } from 'hono'
import { Context } from 'hono';
import { bearerAuth } from 'hono/bearer-auth'
import OpenAI from "openai";

const openai = new OpenAI({
  apiKey: "sk-1234",
  baseURL: "https://openai-endpoint.ishaanjaffer0324.workers.dev"
});

async function call_proxy() {
  const completion = await openai.chat.completions.create({
    messages: [{ role: "system", content: "You are a helpful assistant." }],
    model: "gpt-3.5-turbo",
  });

  return completion
}

const app = new Hono()

// Middleware for API Key Authentication
const apiKeyAuth = async (c: Context, next: Function) => {
  const apiKey = c.req.header('Authorization');
  if (!apiKey || apiKey !== 'Bearer sk-1234') {
    return c.text('Unauthorized', 401);
  }
  await next();
};

app.use('/*', apiKeyAuth)

app.get('/', (c) => {
  return c.text('Hello Hono!')
})

// Handler for chat completions
const chatCompletionHandler = async (c: Context) => {
  // Assuming your logic for handling chat completion goes here
  // For demonstration, just returning a simple JSON response
  const response = await call_proxy()
  return c.json(response);
};

// Register the above handler for different POST routes with the apiKeyAuth middleware
app.post('/v1/chat/completions', chatCompletionHandler);
app.post('/chat/completions', chatCompletionHandler);

// Example showing how you might handle dynamic segments within the URL
// Here, using ':model*' to capture the rest of the path as a parameter 'model'
app.post('/openai/deployments/:model*/chat/completions', chatCompletionHandler);

export default app
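
The comment on the last route says ':model*' captures the rest of the path as a parameter named 'model', but the shared handler never reads it and always sends the hard-coded gpt-3.5-turbo. A minimal sketch of a variant handler that forwards the captured deployment name upstream, assuming the pattern captures as the comment claims; modelAwareHandler is a hypothetical name, not part of this commit:

import { Context } from 'hono';
import OpenAI from 'openai';

// Same client construction as in the diff above (key and baseURL copied from index.ts).
const openai = new OpenAI({
  apiKey: 'sk-1234',
  baseURL: 'https://openai-endpoint.ishaanjaffer0324.workers.dev',
});

// Hypothetical variant of chatCompletionHandler: reads the segment captured by
// ':model*' via c.req.param('model') and passes it upstream instead of a constant.
const modelAwareHandler = async (c: Context) => {
  const model = c.req.param('model') ?? 'gpt-3.5-turbo'; // fall back for the static routes
  const completion = await openai.chat.completions.create({
    messages: [{ role: 'system', content: 'You are a helpful assistant.' }],
    model,
  });
  return c.json(completion);
};

export { modelAwareHandler };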
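Note that bearerAuth is imported from 'hono/bearer-auth' but never used; Hono ships an equivalent of the hand-rolled apiKeyAuth middleware under that name. A sketch of the swap, keeping the same hard-coded token (the built-in also returns a WWW-Authenticate challenge on the 401, which the manual check does not):

import { Hono } from 'hono';
import { bearerAuth } from 'hono/bearer-auth';

const app = new Hono();

// Same effect as the hand-rolled apiKeyAuth: any request without
// "Authorization: Bearer sk-1234" is rejected with a 401.
app.use('/*', bearerAuth({ token: 'sk-1234' }));

export default app;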
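The commit message frames this as a perf experiment, so one client-side way to get a number is to time a handful of requests through the worker's route. A minimal sketch (Node 18+ or any runtime with global fetch); the worker URL is a placeholder, the Bearer key is the hard-coded one from the diff, and the request body only matters if the handler is later changed to forward it:

// Hypothetical smoke test: time N sequential chat-completion calls through the worker.
const WORKER_URL = 'https://proxy.example.workers.dev'; // placeholder, not a real deployment

async function timeRequests(n: number): Promise<void> {
  const latencies: number[] = [];
  for (let i = 0; i < n; i++) {
    const start = Date.now();
    const res = await fetch(`${WORKER_URL}/v1/chat/completions`, {
      method: 'POST',
      headers: {
        'Authorization': 'Bearer sk-1234', // must match the key in apiKeyAuth
        'Content-Type': 'application/json',
      },
      body: JSON.stringify({
        model: 'gpt-3.5-turbo',
        messages: [{ role: 'user', content: 'ping' }],
      }),
    });
    await res.json(); // drain the body so the timing covers the full response
    latencies.push(Date.now() - start);
  }
  latencies.sort((a, b) => a - b);
  console.log(`median: ${latencies[Math.floor(n / 2)]}ms, max: ${latencies[n - 1]}ms`);
}

timeRequests(10).catch(console.error);

Running the same script against the existing Python proxy would give the "and by how much" comparison the commit message asks about.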