Spaces:
Paused
Paused
File size: 4,443 Bytes
d31db84 cb2bc0c d31db84 cb2bc0c d1b04f9 d31db84 cb2bc0c d31db84 cb2bc0c d31db84 cb2bc0c d31db84 cb2bc0c d31db84 d1b04f9 cb2bc0c d31db84 cb2bc0c d1b04f9 cb2bc0c d31db84 cb2bc0c d31db84 cb2bc0c d31db84 cb2bc0c d31db84 cb2bc0c d1b04f9 d31db84 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 |
import express from 'express';
import cors from 'cors';
import fetch from 'node-fetch';
const app = express();

// Allow any origin; clients send JSON bodies plus an Authorization header.
const corsOptions = {
  origin: '*',
  methods: ['GET', 'POST', 'OPTIONS'],
  allowedHeaders: ['Content-Type', 'Authorization']
};
app.use(cors(corsOptions));
app.use(express.json());

// Answer CORS preflight requests on every path with 204 No Content.
app.options('*', (_req, res) => res.sendStatus(204));
// Список моделей
// Catalogue of model ids exposed through the OpenAI-compatible /models list.
// Every entry is reported with created: 0 and owned_by: 'helix'.
const MODEL_IDS = [
  // OpenAI — GPT‑4.1 / 4.5
  'gpt-4.1',
  'gpt-4.1-2025-04-14',
  'gpt-4.1-mini',
  'gpt-4.1-mini-2025-04-14',
  'gpt-4.1-nano',
  'gpt-4.1-nano-2025-04-14',
  'gpt-4.5-preview',
  'gpt-4.5-preview-2025-02-27',
  // OpenAI — GPT‑4o
  'gpt-4o',
  'gpt-4o-2024-05-13',
  'gpt-4o-2024-08-06',
  'gpt-4o-2024-11-20',
  'gpt-4o-mini',
  'gpt-4o-mini-2024-07-18',
  'gpt-4o-search-preview',
  'gpt-4o-search-preview-2025-03-11',
  'gpt-4o-mini-search-preview',
  'gpt-4o-mini-search-preview-2025-03-11',
  // Helix — GPT‑3.5 Turbo
  'gpt-3.5-turbo'
];

// Model listing (OpenAI-compatible GET /models and GET /v1/models).
app.get(['/models', '/v1/models'], (_req, res) => {
  res.json({
    object: 'list',
    data: MODEL_IDS.map((id) => ({ id, object: 'model', created: 0, owned_by: 'helix' }))
  });
});
// Прокси для чата
// Chat-completion proxy (OpenAI-compatible POST /chat/completions).
// Flattens the OpenAI message history into a single Helix "parts" message,
// forwards it upstream, and re-shapes the reply as an OpenAI response.
app.post(['/chat/completions', '/v1/chat/completions'], async (req, res) => {
  const {
    model,
    messages = [],
    stream = false,
    temperature,
    top_p,
    presence_penalty,
    frequency_penalty,
    ...rest
  } = req.body ?? {};

  // Helix accepts one text blob per message, so the whole history is
  // flattened with English role prefixes (previously "用户:" / "AI:").
  const historyText = messages
    .map(m => (m.role === 'user' ? 'User: ' : 'Assistant: ') + m.content)
    .join('\n');

  const helixPayload = {
    type: 'text',
    stream,
    provider: getProvider(model),
    model,
    messages: [
      {
        role: 'user',
        content: { content_type: 'text', parts: [historyText] }
      }
    ],
    temperature,
    top_p,
    presence_penalty,
    frequency_penalty,
    ...rest
  };

  try {
    // Forward to Helix, passing the caller's Authorization header verbatim.
    const helixRes = await fetch('https://app.tryhelix.ai/api/v1/sessions/chat', {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        Authorization: req.header('authorization') || ''
      },
      body: JSON.stringify(helixPayload)
    });

    if (!stream) {
      // Non-streaming: unwrap the first choice and rebuild an OpenAI envelope.
      const data = await helixRes.json();
      const reply = data?.choices?.[0]?.message?.content ?? '';
      return res.status(helixRes.status).json({
        id: `chatcmpl-proxy-${data.id ?? Date.now()}`,
        object: 'chat.completion',
        created: Math.floor(Date.now() / 1000),
        model,
        choices: [
          {
            index: 0,
            message: { role: 'assistant', content: reply },
            finish_reason: 'stop'
          }
        ]
      });
    }

    // Streaming: relay the Helix SSE body to the client untouched.
    res.status(helixRes.status);
    res.set('Content-Type', 'text/event-stream');
    helixRes.body.pipe(res);
    // Don't leave the client hanging if the upstream stream breaks mid-flight.
    helixRes.body.on('error', () => res.end());
  } catch (err) {
    // Previously a network/JSON failure became an unhandled promise rejection
    // (Express 4 never sees errors thrown in async handlers), so the client
    // request hung forever. Surface it as a 502 instead.
    console.error('Helix proxy error:', err);
    if (!res.headersSent) {
      res.status(502).json({
        error: { message: 'Upstream Helix request failed', type: 'proxy_error' }
      });
    } else {
      res.end();
    }
  }
});
// Map a model id to the provider name Helix expects:
// GPT-family ids go to 'openai', known open-weight prefixes to 'helix',
// and anything else falls through to 'togetherai'.
function getProvider(modelId) {
  const isOpenAi = /^gpt-[34]|^gpt-3\.5/.test(modelId);
  if (isOpenAi) return 'openai';
  const isHelixHosted = /^(llama|phi|aya|gemma|deepseek|qwen)/.test(modelId);
  return isHelixHosted ? 'helix' : 'togetherai';
}
// Hosting platforms inject PORT; fall back to 7860 for local runs.
// (|| is intentional: an unset env var is undefined either way.)
const PORT = process.env.PORT || 7860;
app.listen(PORT, () => console.log(`🚀 Server listening on port ${PORT}`));