Add scripts

This commit is contained in:
2025-07-19 19:08:14 +09:00
commit aed368c6fe
3 changed files with 547 additions and 0 deletions

7
README.md Normal file
View File

@@ -0,0 +1,7 @@
# LLMProxy
Cloudflare Worker script that proxies various OpenAI-compatible LLM APIs.
Automatically detects the upstream provider based on the model name.
The Anthropic API is provided via an OpenAI compatibility layer that is also served as a CF Worker (see anthropic.js).

308
anthropic.js Normal file
View File

@@ -0,0 +1,308 @@
// Cloudflare Worker entry point: every request is delegated to handleRequest.
export default {
  fetch: (req, env) => handleRequest(req, env),
};
// Fallback Anthropic API key used when the client sends no usable
// Authorization header. Empty by default — clients must supply their own.
const CLAUDE_API_KEY = '';
// Upstream Anthropic Messages API endpoint.
const CLAUDE_BASE_URL = 'https://api.anthropic.com/v1/messages';
// max_tokens is mandatory on the Messages API; every proxied request is
// capped at this value.
const MAX_TOKENS = 4096;
/**
 * Extracts the upstream API key from the incoming request headers.
 * Uses the token after the auth scheme ("Bearer <key>") and falls back to
 * CLAUDE_API_KEY when the header or token is absent.
 * @param {Object} headers Plain object of (lower-cased) request headers.
 * @returns {string} API key to send to Anthropic.
 */
function getAPIKey(headers) {
  const auth = headers.authorization;
  if (!auth) {
    return CLAUDE_API_KEY;
  }
  return auth.split(' ')[1] || CLAUDE_API_KEY;
}
/**
 * Normalizes one Anthropic streaming event into the subset of fields this
 * proxy cares about. Events that carry no useful payload (ping, block
 * start/stop, message_stop, unknown types) map to null.
 * @param {Object} claudeResponse Parsed SSE event from the Messages API.
 * @returns {?Object} {id, model, inputTokens} | {content} |
 *   {stopReason, outputTokens} | null.
 */
function formatStreamResponseJson(claudeResponse) {
  const { type } = claudeResponse;
  if (type === 'message_start') {
    const { id, model, usage } = claudeResponse.message;
    return { id, model, inputTokens: usage.input_tokens };
  }
  if (type === 'content_block_delta') {
    return { content: claudeResponse.delta.text };
  }
  if (type === 'message_delta') {
    return {
      stopReason: claudeResponse.delta.stop_reason,
      outputTokens: claudeResponse.usage.output_tokens,
    };
  }
  // ping / content_block_start / content_block_stop / message_stop / unknown
  return null;
}
/**
 * Builds an OpenAI-shaped chat completion (or chunk) from Claude data.
 * When `stream` is true and a stop reason has been recorded, emits the
 * terminal chunk (empty delta, finish_reason "stop", usage totals).
 * @param {Object} claudeResponse Object with the assistant `content` text.
 * @param {Object} metaInfo Accumulated {id, model, inputTokens, outputTokens,
 *   stopReason}.
 * @param {boolean} [stream=false] Chunk shape vs. full completion shape.
 * @returns {Object} OpenAI-compatible response body.
 */
function claudeToChatGPTResponse(claudeResponse, metaInfo, stream = false) {
  const created = Math.floor(Date.now() / 1000);
  const promptTokens = metaInfo.inputTokens || 0;
  const completionTokens = metaInfo.outputTokens || 0;
  const usage = {
    prompt_tokens: promptTokens,
    completion_tokens: completionTokens,
    total_tokens: promptTokens + completionTokens,
  };
  // Terminal streaming chunk: no content, just the finish marker and usage.
  if (stream && metaInfo.stopReason) {
    return {
      id: metaInfo.id,
      object: 'chat.completion.chunk',
      created,
      model: metaInfo.model,
      choices: [
        { index: 0, delta: {}, logprobs: null, finish_reason: 'stop' },
      ],
      usage,
    };
  }
  const message = {
    role: 'assistant',
    content: claudeResponse.content || '',
  };
  const choice = {
    index: 0,
    finish_reason: metaInfo.stopReason === 'end_turn' ? 'stop' : null,
  };
  const result = {
    id: metaInfo.id || 'unknown',
    created,
    model: metaInfo.model,
    usage,
    choices: [choice],
  };
  if (stream) {
    result.object = 'chat.completion.chunk';
    choice.delta = message;
  } else {
    result.object = 'chat.completion';
    choice.message = message;
  }
  return result;
}
/**
 * Reads Anthropic SSE events from `response.body`, converts each to an
 * OpenAI-style `chat.completion.chunk`, and writes the result to `writable`
 * as an SSE stream terminated by `data: [DONE]`.
 * @param {Response} response Upstream Claude streaming response.
 * @param {WritableStream} writable Destination half of a TransformStream.
 * @param {string} model Model name used until message_start reports the
 *   authoritative one.
 */
async function streamJsonResponseBodies(response, writable, model) {
  const reader = response.body.getReader();
  const writer = writable.getWriter();
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();
  let buffer = '';
  const metaInfo = {
    model,
  };
  try {
    while (true) {
      const { done, value } = await reader.read();
      if (done) {
        // BUGFIX: an SSE event must end with a blank line ("\n\n"); without
        // it clients never see [DONE] as a complete event.
        await writer.write(encoder.encode('data: [DONE]\n\n'));
        break;
      }
      // stream: true keeps multi-byte characters split across reads intact.
      buffer += decoder.decode(value, { stream: true });
      const regex = /event:\s*.*?\s*\ndata:\s*(.*?)(?=\n\n|\s*$)/gs;
      // BUGFIX: the buffer was previously sliced inside the exec loop while
      // the /g regex kept its stale lastIndex, which could skip or duplicate
      // events. Track how much was consumed and slice once per read instead.
      let consumed = 0;
      let match;
      while ((match = regex.exec(buffer)) !== null) {
        const end = match.index + match[0].length;
        try {
          const event = JSON.parse(match[1].trim());
          consumed = end;
          const formatedChunk = formatStreamResponseJson(event);
          if (formatedChunk === null) {
            continue;
          }
          // Fold per-event metadata into the running state for later chunks.
          metaInfo.id = formatedChunk.id ?? metaInfo.id;
          metaInfo.model = formatedChunk.model ?? metaInfo.model;
          metaInfo.inputTokens = formatedChunk.inputTokens ?? metaInfo.inputTokens;
          metaInfo.outputTokens = formatedChunk.outputTokens ?? metaInfo.outputTokens;
          metaInfo.stopReason = formatedChunk.stopReason ?? metaInfo.stopReason;
          const transformedLine = claudeToChatGPTResponse(formatedChunk, metaInfo, true);
          await writer.write(
            encoder.encode(`data: ${JSON.stringify(transformedLine)}\n\n`)
          );
        } catch (e) {
          if (end >= buffer.length) {
            // Partial JSON frame at the tail of the buffer: keep it buffered
            // and retry after the next network read.
            break;
          }
          // A complete-but-malformed frame mid-buffer: drop it and move on.
          consumed = end;
        }
      }
      buffer = buffer.slice(consumed);
    }
  } finally {
    // Always release the writer so the client's stream terminates.
    await writer.close();
  }
}
/**
 * Routes incoming requests:
 *   GET /v1/models — list Anthropic models in OpenAI /v1/models format.
 *   OPTIONS        — CORS preflight.
 *   POST           — proxy an OpenAI chat completion to the Claude Messages
 *                    API (streaming or non-streaming).
 * @param {Request} request Incoming request.
 * @param {*} env Worker environment bindings (currently unused).
 * @returns {Promise<Response>}
 */
async function handleRequest(request, env) {
  if (request.method === 'GET') {
    const path = new URL(request.url).pathname;
    if (path === '/v1/models') {
      const headers = Object.fromEntries(request.headers);
      const apiKey = getAPIKey(headers);
      const claudeModels = await fetch('https://api.anthropic.com/v1/models', {
        method: 'GET',
        headers: {
          'x-api-key': apiKey,
          'anthropic-version': '2023-06-01',
        },
      });
      const claudeModelsResp = await claudeModels.json();
      // Map Anthropic's model records onto the OpenAI /v1/models shape.
      const claudeModelsList = claudeModelsResp.data.map((model) => ({
        id: model.id,
        object: model.type,
        owned_by: 'Anthropic',
      }));
      return new Response(
        JSON.stringify({ object: 'list', data: claudeModelsList }),
        {
          status: 200,
          headers: {
            'Content-Type': 'application/json',
            'Access-Control-Allow-Origin': '*',
            'Access-Control-Allow-Methods': '*',
            'Access-Control-Allow-Headers': '*',
            'Access-Control-Allow-Credentials': 'true',
          },
        }
      );
    }
    return new Response('Not Found', { status: 404 });
  }
  if (request.method === 'OPTIONS') {
    return handleOPTIONS();
  }
  if (request.method !== 'POST') {
    return new Response('Method not allowed', { status: 405 });
  }
  const headers = Object.fromEntries(request.headers);
  const apiKey = getAPIKey(headers);
  if (!apiKey) {
    return new Response('Not Allowed', {
      status: 403,
    });
  }
  const requestBody = await request.json();
  const { model, messages, temperature, stop, stream } = requestBody;
  // Anthropic takes the system prompt as a top-level field, not a message.
  const systemMessage = messages.find((message) => message.role === 'system');
  const claudeRequestBody = {
    model,
    messages: messages.filter((message) => message.role !== 'system'),
    temperature,
    max_tokens: MAX_TOKENS,
    stop_sequences: stop,
    system: systemMessage?.content,
    stream,
  };
  const claudeResponse = await fetch(CLAUDE_BASE_URL, {
    method: 'POST',
    headers: {
      'Content-Type': 'application/json',
      'x-api-key': apiKey,
      'anthropic-version': '2023-06-01',
    },
    body: JSON.stringify(claudeRequestBody),
  });
  // BUGFIX: request.cf is only present on Cloudflare; optional chaining
  // avoids a crash when running elsewhere (e.g. local dev).
  console.log(request.cf?.colo);
  // NOTE(review): this logs the full request headers, which include the
  // caller's API key — consider redacting before production use.
  console.log({
    body: claudeRequestBody,
    headers: headers,
  });
  if (stream) {
    const { readable, writable } = new TransformStream();
    // BUGFIX: the `model` argument was previously omitted, so early stream
    // chunks (before message_start arrived) carried no model name.
    streamJsonResponseBodies(claudeResponse, writable, model);
    return new Response(readable, {
      headers: {
        'Content-Type': 'text/event-stream',
        'Access-Control-Allow-Origin': '*',
        'Access-Control-Allow-Methods': '*',
        'Access-Control-Allow-Headers': '*',
        'Access-Control-Allow-Credentials': 'true',
      },
    });
  }
  // Non-streaming: translate the complete Claude response (or error) into
  // the OpenAI response shape.
  const claudeResponseBody = await claudeResponse.json();
  let openAIResponseBody;
  if (claudeResponseBody.type === 'error') {
    openAIResponseBody = {
      error: {
        message: claudeResponseBody.error.message,
        type: claudeResponseBody.error.type,
        param: null,
        code: claudeResponseBody.error.type,
      },
    };
  } else {
    const formatedResult = {
      id: claudeResponseBody.id,
      model: claudeResponseBody.model,
      inputTokens: claudeResponseBody.usage.input_tokens,
      outputTokens: claudeResponseBody.usage.output_tokens,
      stopReason: claudeResponseBody.stop_reason,
    };
    // BUGFIX: guard content[0] — Claude may return an empty content array
    // (e.g. when generation stops immediately).
    openAIResponseBody = claudeToChatGPTResponse(
      { content: claudeResponseBody.content?.[0]?.text },
      formatedResult
    );
  }
  return new Response(JSON.stringify(openAIResponseBody), {
    status: claudeResponse.status,
    headers: {
      'Content-Type': 'application/json',
      'Access-Control-Allow-Origin': '*',
      'Access-Control-Allow-Methods': '*',
      'Access-Control-Allow-Headers': '*',
      'Access-Control-Allow-Credentials': 'true',
    },
  });
}
/**
 * Responds to a CORS preflight request with permissive allow-* headers.
 * @returns {Response} Empty response carrying the CORS headers.
 */
function handleOPTIONS() {
  const corsAllowAll = {
    'Access-Control-Allow-Origin': '*',
    'Access-Control-Allow-Methods': '*',
    'Access-Control-Allow-Headers': '*',
    'Access-Control-Allow-Credentials': 'true',
  };
  return new Response(null, { headers: corsAllowAll });
}

232
worker.js Normal file
View File

@@ -0,0 +1,232 @@
// Set to 1 to log routing decisions (model → provider) in handleChatCompletions.
const DEBUG = 0
// Built-in provider list, used to seed the KV "config" entry on first request.
// Every ENDPOINT must expose an OpenAI-compatible API surface.
const DEFAULT_PROVIDERS = [
  {
    NAME: "OpenAI",
    ENDPOINT: "https://api.openai.com/v1",
    KEY: "REDACTED"
  },
  {
    NAME: "Anthropic", // It needs to be OpenAI compatible
    ENDPOINT: "https://claude.api.morgan.kr/v1",
    KEY: "REDACTED"
  },
  {
    NAME: "DeepSeek",
    ENDPOINT: "https://api.deepseek.com/v1",
    KEY: "REDACTED"
  },
  {
    NAME: "GroqCloud",
    ENDPOINT: "https://api.groq.com/openai/v1",
    KEY: "REDACTED"
  },
  {
    NAME: "Google",
    ENDPOINT: "https://generativelanguage.googleapis.com/v1beta/openai",
    KEY: "REDACTED"
  },
]
// Bearer token clients must present in the Authorization header to use this proxy.
const AUTH_KEY = "REDACTED"
export default {
  /**
   * Worker entry point. Authenticates the caller, loads the provider config
   * from KV (seeding it from DEFAULT_PROVIDERS on first use), then dispatches:
   *   /v1/models.reload     — force-refresh every provider's model cache
   *   /v1/models            — aggregated (cached) model list
   *   /v1/chat/completions  — proxied to whichever provider owns the model
   * @param {Request} request
   * @param {*} env Worker bindings; requires a KV namespace bound as env.KV.
   * @param {*} ctx Execution context (unused).
   * @returns {Promise<Response>}
   */
  async fetch(request, env, ctx) {
    // CORS preflight: answer before any auth check.
    if (request.method === 'OPTIONS') {
      return new Response(null, {
        status: 204,
        headers: corsHeaders()
      })
    }
    const requestClone = request.clone()
    // 1. Authorization — require "Bearer <AUTH_KEY>".
    const token = request.headers.get('authorization')?.split(' ')[1];
    if (token !== AUTH_KEY) {
      return new Response('Unauthorized', {
        status: 401,
        headers: corsHeaders()
      })
    }
    // 2. Process request
    try {
      const path = new URL(request.url).pathname
      // Provider config is cached in KV; seed it from the defaults on first run.
      let PROVIDERS;
      const config = await env.KV.get("config");
      if (config) {
        PROVIDERS = JSON.parse(config)
      } else {
        PROVIDERS = DEFAULT_PROVIDERS
        await env.KV.put("config", JSON.stringify(DEFAULT_PROVIDERS));
      }
      if (path === '/v1/models.reload') {
        // Force-refresh all provider model lists in parallel.
        const allModelsArrays = await Promise.all(
          PROVIDERS.map(prov => getProviders(env, prov, true))
        );
        const resp = { object: 'list', data: allModelsArrays.flat() };
        return new Response(JSON.stringify(resp), {
          status: 200,
          headers: {
            ...corsHeaders(),
            'Content-Type': 'application/json'
          }
        });
      }
      // 2-1. Models (served from the KV cache when warm).
      else if (path === '/v1/models') {
        // The per-provider lookups are independent — fetch them in parallel.
        const modelArrays = await Promise.all(
          PROVIDERS.map(prov => getProviders(env, prov))
        );
        const resp = { object: 'list', data: modelArrays.flat() }
        return new Response(JSON.stringify(resp), {
          status: 200,
          headers: {
            ...corsHeaders(),
            'Content-Type': 'application/json'
          }
        });
      }
      // 2-2. Chat completions — delegate to the provider-routing handler.
      else if (path === '/v1/chat/completions') {
        return await handleChatCompletions(requestClone, env, PROVIDERS)
      }
      else {
        return new Response(`Error: Unknown Endpoint`, {
          status: 500,
          headers: corsHeaders()
        })
      }
    } catch (error) {
      return new Response(`Error: ${error.message}`, {
        status: 500,
        headers: corsHeaders()
      })
    }
  }
}
/**
 * Proxies an OpenAI /v1/chat/completions request to whichever configured
 * provider serves the requested model, piping the upstream body (including
 * SSE streams) straight through with our CORS headers.
 * @param {Request} request Incoming request (body not yet consumed).
 * @param {*} env Worker bindings (KV namespace used by getProviders).
 * @param {Array<{NAME:string,ENDPOINT:string,KEY:string}>} PROVIDERS
 * @returns {Promise<Response>}
 */
async function handleChatCompletions(request, env, PROVIDERS) {
  // Parse JSON body.
  const body = await request.json();
  const { model } = body;
  // Pick the first provider whose (cached) /models list contains `model`.
  const getProvider = async (model) => {
    for (const provider of PROVIDERS) {
      const models = await getProviders(env, provider);
      if (models.some(m => m.id === model)) {
        return provider;
      }
    }
    return null;
  };
  const provider = await getProvider(model);
  if (!provider) {
    return new Response('Model not supported by any provider', {
      status: 400,
      headers: corsHeaders()
    });
  }
  const { ENDPOINT, KEY: apiKey } = provider;
  if (!apiKey) {
    return new Response('Unauthorized: Invalid Provider', {
      status: 401,
      headers: corsHeaders()
    });
  }
  // Only log when debugging — request bodies contain user content.
  if (DEBUG) {
    console.log({ model: model, provider: provider, endpoint: ENDPOINT, body: body })
  }
  const targetHeaders = new Headers(request.headers);
  targetHeaders.set('Authorization', `Bearer ${apiKey}`);
  targetHeaders.set('Content-Type', 'application/json');
  const url = new URL(request.url);
  // BUGFIX: a stray semicolon previously terminated this expression before
  // `+ url.search`, silently dropping the query string from the upstream URL.
  const upstreamUrl = ENDPOINT + url.pathname.replace(/^\/v1\//, '/') + url.search;
  const upstreamResponse = await fetch(upstreamUrl, {
    method: request.method,
    headers: targetHeaders,
    body: JSON.stringify(body)
  });
  // Strip the upstream's CORS headers so ours (merged below) win.
  const filteredUpstreamHeaders = Object.fromEntries(
    [...upstreamResponse.headers].filter(([key]) =>
      !key.toLowerCase().startsWith('access-control-')
    )
  );
  const responseHeaders = {
    ...filteredUpstreamHeaders,
    ...corsHeaders()
  };
  const contentType = upstreamResponse.headers.get('content-type') || '';
  if (contentType.includes('text/event-stream')) {
    // Streaming bodies have no fixed length and must not be cached.
    delete responseHeaders['content-length'];
    responseHeaders['Cache-Control'] = 'no-cache';
  }
  return new Response(upstreamResponse.body, {
    status: upstreamResponse.status,
    statusText: upstreamResponse.statusText,
    headers: responseHeaders
  });
}
/**
 * Returns a provider's model list, cached in KV under the provider's NAME.
 * On a cache miss (or when `refresh` is true) the list is re-fetched from
 * the provider's /models endpoint and written back to KV.
 * @param {*} env Worker bindings (env.KV namespace).
 * @param {{NAME:string,ENDPOINT:string,KEY:string}} provider
 * @param {boolean} [refresh=false] Bypass the cache and re-fetch.
 * @returns {Promise<Array>} Model records ({id, ...}) for this provider.
 * @throws {Error} When the upstream /models request does not return 2xx.
 */
async function getProviders(env, provider, refresh = false) {
  let value = await env.KV.get(provider.NAME);
  if (value === null || refresh) {
    const headers = new Headers();
    headers.set('Authorization', `Bearer ${provider.KEY}`)
    headers.set('Content-Type', 'application/json')
    const response = await fetch(provider.ENDPOINT + "/models", { headers })
    // BUGFIX: without this check an upstream error page was cached in KV
    // and then crashed every later JSON.parse of the cache.
    if (!response.ok) {
      throw new Error(`${provider.NAME} /models request failed: ${response.status}`);
    }
    // Fall back to an empty list when the provider omits `data`.
    const models = (await response.json()).data ?? [];
    // Stringify once and reuse for both the KV write and the return value.
    value = JSON.stringify(models);
    await env.KV.put(provider.NAME, value);
  }
  return JSON.parse(value);
}
/**
 * CORS headers attached to every response from this worker.
 * @returns {Object} Header-name → value map.
 */
function corsHeaders() {
  const allow = {
    Origin: '*',
    Methods: 'GET, POST, OPTIONS',
    Headers: 'Content-Type, Authorization',
  };
  return Object.fromEntries(
    Object.entries(allow).map(([suffix, value]) => [`Access-Control-Allow-${suffix}`, value])
  );
}