magicboris committed on
Commit
d1b04f9
·
verified ·
1 Parent(s): ebdffcc

Update server.js

Browse files
Files changed (1) hide show
  1. server.js +109 -85
server.js CHANGED
@@ -1,56 +1,48 @@
1
- import express from 'express'
2
- import cors from 'cors'
3
- import fetch from 'node-fetch'
4
-
5
- const app = express()
6
-
7
- // Считываем ключ Helix из переменных окружения
8
- const HELIX_API_KEY = process.env.HELIX_API_KEY || ''
9
-
10
- // CORS
11
- app.use(cors({
12
- origin: '*',
13
- methods: ['GET', 'POST', 'OPTIONS'],
14
- allowedHeaders: ['Content-Type', 'Authorization']
15
- }))
16
- app.use(express.json())
17
- app.options('*', (req, res) => res.sendStatus(204))
18
-
19
- // Список доступных моделей
20
- app.get(['/models', '/v1/models'], (req, res) => {
21
- res.json({
 
 
 
 
 
 
 
22
  object: 'list',
23
  data: [
24
- // OpenAI GPT‑4.1 / 4.5
25
- { id: 'gpt-4.1', object: 'model', created: 0, owned_by: 'helix' },
26
- { id: 'gpt-4.1-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
27
- { id: 'gpt-4.1-mini', object: 'model', created: 0, owned_by: 'helix' },
28
- { id: 'gpt-4.1-mini-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
29
- { id: 'gpt-4.1-nano', object: 'model', created: 0, owned_by: 'helix' },
30
- { id: 'gpt-4.1-nano-2025-04-14', object: 'model', created: 0, owned_by: 'helix' },
31
- { id: 'gpt-4.5-preview', object: 'model', created: 0, owned_by: 'helix' },
32
- { id: 'gpt-4.5-preview-2025-02-27', object: 'model', created: 0, owned_by: 'helix' },
33
-
34
- // OpenAI — GPT‑4o
35
- { id: 'gpt-4o', object: 'model', created: 0, owned_by: 'helix' },
36
- { id: 'gpt-4o-2024-05-13', object: 'model', created: 0, owned_by: 'helix' },
37
- { id: 'gpt-4o-2024-08-06', object: 'model', created: 0, owned_by: 'helix' },
38
- { id: 'gpt-4o-2024-11-20', object: 'model', created: 0, owned_by: 'helix' },
39
- { id: 'gpt-4o-mini', object: 'model', created: 0, owned_by: 'helix' },
40
- { id: 'gpt-4o-mini-2024-07-18', object: 'model', created: 0, owned_by: 'helix' },
41
- { id: 'gpt-4o-search-preview', object: 'model', created: 0, owned_by: 'helix' },
42
- { id: 'gpt-4o-search-preview-2025-03-11', object: 'model', created: 0, owned_by: 'helix' },
43
- { id: 'gpt-4o-mini-search-preview', object: 'model', created: 0, owned_by: 'helix' },
44
- { id: 'gpt-4o-mini-search-preview-2025-03-11', object: 'model', created: 0, owned_by: 'helix' },
45
-
46
- // Helix — GPT‑3.5 Turbo
47
  { id: 'gpt-3.5-turbo', object: 'model', created: 0, owned_by: 'helix' }
48
  ]
49
- })
50
- })
 
 
 
 
 
 
 
 
 
51
 
52
- // Прокси для /chat/completions
53
- app.post(['/chat/completions', '/v1/chat/completions'], async (req, res) => {
54
  const {
55
  model,
56
  messages = [],
@@ -60,54 +52,43 @@ app.post(['/chat/completions', '/v1/chat/completions'], async (req, res) => {
60
  presence_penalty,
61
  frequency_penalty,
62
  ...rest
63
- } = req.body
64
 
65
- // Собираем историю, заменяя китайские префиксы на English
66
  const historyText = messages
67
- .map(m => (m.role === 'user' ? 'User: ' : 'Assistant: ') + m.content)
68
- .join('\n')
69
 
70
- // Формируем полезную нагрузку для Helix
71
  const helixPayload = {
72
  type: 'text',
73
  stream,
74
  provider: getProvider(model),
75
  model,
76
  messages: [
77
- {
78
- role: 'user',
79
- content: { content_type: 'text', parts: [historyText] }
80
- }
81
  ],
82
  temperature,
83
  top_p,
84
  presence_penalty,
85
  frequency_penalty,
86
  ...rest
87
- }
88
-
89
- // Заголовок авторизации для Helix
90
- const authHeader = HELIX_API_KEY
91
- ? `Bearer ${HELIX_API_KEY}`
92
- : (req.header('authorization') || '')
93
 
94
- // Отправляем запрос в Helix
95
  const helixRes = await fetch(
96
  'https://app.tryhelix.ai/api/v1/sessions/chat',
97
  {
98
  method: 'POST',
99
  headers: {
100
  'Content-Type': 'application/json',
101
- Authorization: authHeader
102
  },
103
  body: JSON.stringify(helixPayload)
104
  }
105
- )
106
 
107
  if (!stream) {
108
- const data = await helixRes.json()
109
- const reply = data?.choices?.[0]?.message?.content ?? ''
110
- return res.status(helixRes.status).json({
111
  id: `chatcmpl-proxy-${data.id ?? Date.now()}`,
112
  object: 'chat.completion',
113
  created: Math.floor(Date.now() / 1000),
@@ -119,23 +100,66 @@ app.post(['/chat/completions', '/v1/chat/completions'], async (req, res) => {
119
  finish_reason: 'stop'
120
  }
121
  ]
122
- })
 
123
  }
124
 
125
- // При стриме прокидываем SSE напрямую
126
- res.status(helixRes.status)
127
- res.set('Content-Type', 'text/event-stream')
128
- helixRes.body.pipe(res)
129
- })
130
-
131
- // Определяем провайдера по имени модели
132
- function getProvider(modelId) {
133
- if (/^gpt-[34]|^gpt-3\.5/.test(modelId)) return 'openai'
134
- if (/^(llama|phi|aya|gemma|deepseek|qwen)/.test(modelId)) return 'helix'
135
- return 'togetherai'
 
 
 
 
 
 
 
 
 
 
 
 
136
  }
137
 
138
- const PORT = process.env.PORT || 7860
139
- app.listen(PORT, () => {
140
- console.log(`🚀 Server listening on port ${PORT}`)
141
- })
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
/**
 * Helix ↔ OpenAI proxy (all paths + streaming filter)
 * 2025-04-19
 *
 * Service-Worker entry point: every incoming request is handed to
 * `router`, whose Response resolves the event.
 */
addEventListener('fetch', (event) => {
  event.respondWith(router(event.request));
});
6
+
7
/**
 * Dispatches a request to the matching handler and wraps every
 * outcome (including errors) in CORS headers.
 *
 * Routes:
 *   OPTIONS *                              -> 204 preflight
 *   GET  /models, /v1/models               -> model catalogue
 *   POST /chat/completions, /v1/...        -> chat proxy
 *   anything else                          -> 404 JSON error
 *
 * @param {Request} req
 * @returns {Promise<Response>}
 */
async function router(req) {
  const { pathname } = new URL(req.url);
  // Trailing slashes are ignored so `/models/` matches `/models`.
  const path = pathname.replace(/\/+$/, '');
  const method = req.method;

  if (method === 'OPTIONS') {
    return cors(new Response(null, { status: 204 }));
  }

  const isModels = path === '/models' || path === '/v1/models';
  if (isModels && method === 'GET') {
    return cors(modelsList());
  }

  const isChat = path === '/chat/completions' || path === '/v1/chat/completions';
  if (isChat && method === 'POST') {
    return cors(await chatProxy(req));
  }

  return cors(json({ error: 'Not Found', path }, 404));
}
25
+
26
/* ---------- Model catalogue ---------- */
/**
 * Builds the OpenAI-compatible `GET /models` payload.
 * @returns {Response} JSON list of the models this proxy advertises.
 */
function modelsList() {
  const ids = ['gpt-4o', 'gpt-3.5-turbo'];
  return json({
    object: 'list',
    data: ids.map((id) => ({
      id,
      object: 'model',
      created: 0,
      owned_by: 'helix',
    })),
  });
}
36
+
37
+ /* ---------- 聊天 ---------- */
38
+ async function chatProxy(req) {
39
+ let body;
40
+ try {
41
+ body = await req.json();
42
+ } catch {
43
+ return json({ error: 'Bad JSON' }, 400);
44
+ }
45
 
 
 
46
  const {
47
  model,
48
  messages = [],
 
52
  presence_penalty,
53
  frequency_penalty,
54
  ...rest
55
+ } = body;
56
 
 
57
  const historyText = messages
58
+ .map(m => (m.role === 'user' ? '用户:' : 'AI:') + m.content)
59
+ .join('\n');
60
 
 
61
  const helixPayload = {
62
  type: 'text',
63
  stream,
64
  provider: getProvider(model),
65
  model,
66
  messages: [
67
+ { role: 'user', content: { content_type: 'text', parts: [historyText] } }
 
 
 
68
  ],
69
  temperature,
70
  top_p,
71
  presence_penalty,
72
  frequency_penalty,
73
  ...rest
74
+ };
 
 
 
 
 
75
 
 
76
  const helixRes = await fetch(
77
  'https://app.tryhelix.ai/api/v1/sessions/chat',
78
  {
79
  method: 'POST',
80
  headers: {
81
  'Content-Type': 'application/json',
82
+ Authorization: req.headers.get('Authorization') || ''
83
  },
84
  body: JSON.stringify(helixPayload)
85
  }
86
+ );
87
 
88
  if (!stream) {
89
+ const data = await helixRes.json();
90
+ const reply = data?.choices?.[0]?.message?.content ?? '';
91
+ const openai = {
92
  id: `chatcmpl-proxy-${data.id ?? Date.now()}`,
93
  object: 'chat.completion',
94
  created: Math.floor(Date.now() / 1000),
 
100
  finish_reason: 'stop'
101
  }
102
  ]
103
+ };
104
+ return json(openai, helixRes.status);
105
  }
106
 
107
+ /* ---- 流式过滤 ---- */
108
+ const filteredStream = helixRes.body
109
+ .pipeThrough(new TextDecoderStream())
110
+ .pipeThrough(sseLineSplitter())
111
+ .pipeThrough(new TransformStream({
112
+ transform(line, controller) {
113
+ if (!line.startsWith('data:')) { controller.enqueue(line); return; }
114
+ const payload = line.slice(5).trim();
115
+ if (payload === '[DONE]') { controller.enqueue(line); return; }
116
+ try {
117
+ const obj = JSON.parse(payload);
118
+ if (obj.choices === null || (Array.isArray(obj.choices) && obj.choices.length === 0))
119
+ return;
120
+ } catch { /* ignore */ }
121
+ controller.enqueue(line);
122
+ }
123
+ }))
124
+ .pipeThrough(new TextEncoderStream());
125
+
126
+ return new Response(filteredStream, {
127
+ status: helixRes.status,
128
+ headers: { 'Content-Type': 'text/event-stream' }
129
+ });
130
  }
131
 
132
/* ---------- Utilities ---------- */
/**
 * Maps a model id to the upstream provider Helix should route to.
 *
 * @param {string} modelId - OpenAI-style model identifier (e.g. "gpt-4o").
 * @returns {'openai'|'helix'|'togetherai'}
 */
function getProvider(modelId) {
  // `^gpt-[34]` already matches every gpt-3.x / gpt-4.x id, so the
  // original's extra `^gpt-3\.5` alternative was redundant and is dropped.
  if (/^gpt-[34]/.test(modelId)) return 'openai';
  if (/^(llama|phi|aya|gemma|deepseek|qwen)/.test(modelId)) return 'helix';
  return 'togetherai';
}
138
/**
 * Serializes a value as a JSON Response.
 * @param {*} payload - value passed to JSON.stringify
 * @param {number} [status=200] - HTTP status code
 * @returns {Response}
 */
const json = (payload, status = 200) => {
  const init = {
    status,
    headers: { 'Content-Type': 'application/json' },
  };
  return new Response(JSON.stringify(payload), init);
};
143
+
144
/**
 * Wraps a Response with permissive CORS headers.
 *
 * Bug fix: the original returned `new Response(res.body, { ...res, headers: h })`.
 * Spreading a Response copies nothing useful — `status`/`statusText` are
 * prototype getters, not own enumerable properties — so every response
 * (204 preflight, 404, upstream error codes) was silently reset to 200.
 * The status and statusText are now carried over explicitly.
 *
 * @param {Response} res - response to decorate (its body stream is reused)
 * @returns {Response} new Response with CORS headers and original status
 */
function cors(res) {
  const headers = new Headers(res.headers);
  headers.set('Access-Control-Allow-Origin', '*');
  headers.set('Access-Control-Allow-Methods', 'GET, POST, OPTIONS');
  headers.set('Access-Control-Allow-Headers', 'Content-Type, Authorization');
  return new Response(res.body, {
    status: res.status,
    statusText: res.statusText,
    headers,
  });
}
151
+
152
/**
 * Creates a TransformStream<string, string> that re-chunks arbitrary
 * text into newline-terminated lines. A partial line is buffered across
 * chunks; whatever remains unterminated when the stream closes is
 * flushed as-is (without a trailing newline).
 *
 * @returns {TransformStream<string, string>}
 */
function sseLineSplitter() {
  let pending = '';
  return new TransformStream({
    transform(chunk, controller) {
      pending += chunk;
      let nl;
      // Emit every complete line, newline included.
      while ((nl = pending.indexOf('\n')) !== -1) {
        controller.enqueue(pending.slice(0, nl + 1));
        pending = pending.slice(nl + 1);
      }
    },
    flush(controller) {
      if (pending) controller.enqueue(pending);
    },
  });
}