// DashX-API / plugins / zai.js
// Z.AI chat plugin — created by HerzaJ (commit 55d5adf).
const { Readable, Transform } = require('node:stream');
const crypto = require('node:crypto');
const axios = require('axios');
/**
 * Transform stream that parses a Server-Sent Events (SSE) byte stream into
 * JavaScript objects.
 *
 * Input:  raw chunks containing newline-delimited SSE lines ("data: {...}").
 * Output: one parsed JSON object per valid "data:" line (object mode).
 *
 * Lines that are not "data:" lines, empty payloads, and payloads that fail
 * JSON.parse (e.g. "[DONE]" sentinels or keep-alive comments) are skipped.
 */
class EventStreamParser extends Transform {
  constructor(options) {
    // readableObjectMode: downstream consumers receive objects, not bytes.
    super({ ...options, readableObjectMode: true });
    this.buffer = '';
  }

  _transform(chunk, encoding, callback) {
    // Accumulate partial lines across chunk boundaries.
    this.buffer += chunk.toString();
    let boundary = this.buffer.indexOf('\n');
    while (boundary !== -1) {
      const line = this.buffer.substring(0, boundary).trim();
      this.buffer = this.buffer.substring(boundary + 1);
      this._emitDataLine(line);
      boundary = this.buffer.indexOf('\n');
    }
    callback();
  }

  // Fix: drain whatever remains in the buffer when the source ends.
  // The original implementation dropped a trailing "data:" line that
  // arrived without a final newline.
  _flush(callback) {
    const line = this.buffer.trim();
    this.buffer = '';
    this._emitDataLine(line);
    callback();
  }

  // Parse a single SSE line and push its JSON payload downstream.
  _emitDataLine(line) {
    if (!line.startsWith('data:')) return;
    const jsonString = line.substring(5).trim();
    if (!jsonString) return;
    try {
      this.push(JSON.parse(jsonString));
    } catch (error) {
      // Deliberately ignore non-JSON payloads (sentinels, keep-alives).
    }
  }
}
/**
 * Lightweight client for the chat.z.ai private web API (GLM model family).
 *
 * Flow (see chat()): fetch guest credentials from /api/v1/auths/, sign the
 * request with the site's browser-fingerprint scheme (see sign()), POST a
 * streaming completion, then reduce the SSE stream into
 * { thinking, answer, search, usage }.
 */
class ZAI {
  constructor() {
    this.ins = axios.create({
      baseURL: 'https://chat.z.ai',
      headers: {
        'user-agent': 'Mozilla/5.0 (Android 15; Mobile; SM-F958; rv:130.0) Gecko/130.0 Firefox/130.0'
      }
    });
    // Base64-encoded page title; decoded and passed to sign() as `w`.
    this.t = 'Wi5haSBDaGF0IC0gRnJlZSBBSSBwb3dlcmVkIGJ5IEdMTS00LjYgJiBHTE0tNC01';
    // Public model name -> internal API model id.
    this._model = {
      'glm-4.6': 'GLM-4-6-API-V1',
      'glm-4.5v': 'glm-4.5v',
      'glm-4.5': '0727-360B-API',
      'glm-4.5-air': '0727-106B-API',
      'glm-4-32b': 'main_chat',
      'glm-4.1v-9b-thinking': 'GLM-4.1V-Thinking-FlashX',
      'z1-rumination': 'deep-research',
      'z1-32b': 'zero',
      'glm-4-flash': 'glm-4-flash'
    };
  }

  /**
   * Build the `x-signature` header and fingerprint query params the backend
   * expects. The HMAC key is derived from a 5-minute time bucket
   * (timestamp / 300000) keyed with the literal string 'junjie'; the final
   * signature covers the sorted core params, the base64-encoded prompt and
   * the millisecond timestamp, joined with '|'.
   *
   * @param {string} g base URL (e.g. https://chat.z.ai)
   * @param {string} v user id from /api/v1/auths/
   * @param {string} b auth token from /api/v1/auths/
   * @param {string} w decoded page title (see this.t)
   * @param {string} m chat id (UUID)
   * @param {string} o the user's prompt text
   * @param {string} e user-agent string
   * @returns {{signature: string, params: object}} header value + query params
   */
  sign(g, v, b, w, m, o, e) {
    let [r, i, h, j, t] = [
      new URL('/c/' + m, g),
      String(Date.now()),
      crypto.randomUUID(),
      Intl.DateTimeFormat(),
      new Date(),
    ], [p, c] = [
      // Core signed params (sorted into the signature payload below).
      { timestamp: i, requestId: h, user_id: v },
      // Spoofed browser fingerprint sent as query params alongside the request.
      { version: "0.0.1", platform: "web", token: b, user_agent: e, language: 'id-ID', languages: 'id-ID,en-US,id,en', timezone: j.resolvedOptions().timeZone, cookie_enabled: 'true', screen_width: '461', screen_height: '1024', screen_resolution: '461x1024', viewport_height: '1051', viewport_width: '543', viewport_size: '543x1051', color_depth: '24', pixel_ratio: '1.328460693359375', current_url: r.href, pathname: r.pathname, search: r.search, hash: r.hash, host: r.host, hostname: r.hostname, protocol: r.protocol, referrer: '', title: w, timezone_offset: String(t.getTimezoneOffset()), local_time: t.toISOString(), utc_time: t.toUTCString(), is_mobile: 'true', is_touch: 'true', max_touch_points: '5', browser_name: 'Chrome', os_name: 'Android' }
    ], a = {...p, ...c}, [x, y, z] = [
      // (removed an unused URLSearchParams local that was computed here)
      [...Object.entries(p).sort()].join(','),
      // UTF-8-safe base64 of the trimmed prompt.
      Buffer.from(String.fromCharCode(...(new Uint8Array((new TextEncoder()).encode(o.trim())))), 'binary').toString('base64'),
      // 5-minute time bucket used to derive the per-window HMAC key.
      Math.floor(Number(i) / 300000)
    ], d = crypto.createHmac('sha256', 'junjie').update(String(z)).digest('hex'), s = crypto.createHmac('sha256', d).update([x,y,i].join('|')).digest('hex');
    return {
      signature: s,
      params: { ...a, signature_timestamp: i }
    };
  }

  /**
   * Reduce a raw SSE response body into a structured result.
   *
   * Events with a numeric `edit_index` overwrite the character buffer in
   * place at that offset; `delta_content` events append. After the stream
   * ends, a <details> element holds the model's thinking text, a <glm_block>
   * element holds web-search tool output, and 【turn0searchN】 citation
   * markers are rewritten to [N].
   *
   * @param {AsyncIterable|Buffer|string} streamData raw response stream
   * @returns {Promise<{thinking: string, answer: string, search: Array, usage: object|null}>}
   */
  parseStream(streamData) {
    return new Promise((resolve, reject) => {
      let mainBuffer = [];
      let usageData = null;
      let toolCallString = '';
      const sourceStream = Readable.from(streamData);
      const parser = new EventStreamParser();
      sourceStream
        .pipe(parser)
        .on('data', (event) => {
          const data = event.data;
          if (!data) return;
          if (data.usage) {
            usageData = data.usage;
          }
          if (typeof data.edit_index === 'number') {
            // In-place edit: overwrite characters starting at edit_index.
            const index = data.edit_index;
            const contentChunk = (data.edit_content || '').split('');
            mainBuffer.splice(index, contentChunk.length, ...contentChunk);
          } else if (data.delta_content) {
            // Append-only delta.
            const currentLength = mainBuffer.length;
            const contentChunk = data.delta_content.split('');
            mainBuffer.splice(currentLength, 0, ...contentChunk);
          }
        })
        .on('end', () => {
          const fullOutput = mainBuffer.join('');
          const result = {
            thinking: '',
            answer: '',
            search: [],
            usage: usageData
          };
          const toolCallMatch = fullOutput.match(/<glm_block[\s\S]*?<\/glm_block>/);
          if (toolCallMatch) {
            toolCallString = toolCallMatch[0];
          }
          const thinkingMatch = fullOutput.match(/<details[^>]*>([\s\S]*?)<\/details>/s);
          if (thinkingMatch) {
            result.thinking = thinkingMatch[1].trim();
            const thinkingEndIndex = thinkingMatch.index + thinkingMatch[0].length;
            result.answer = fullOutput.substring(thinkingEndIndex).trim();
          } else {
            // No thinking block: the answer starts after the tool-call block,
            // or at the beginning when there is none.
            const toolCallEndIndex = toolCallMatch ? toolCallMatch.index + toolCallString.length : 0;
            result.answer = fullOutput.substring(toolCallEndIndex).trim();
          }
          if (toolCallString) {
            try {
              const jsonContentMatch = toolCallString.match(/<glm_block[^>]*>([\s\S]*)<\/glm_block>/);
              if (jsonContentMatch && jsonContentMatch[1]) {
                const dt = JSON.parse(jsonContentMatch[1]);
                // Fixed: fall back to [] (was {}) to keep result.search the
                // array shape it is declared with above.
                result.search = dt?.data?.browser?.search_result || [];
              }
            } catch (error) {
              // Best effort: malformed tool-call JSON leaves search empty.
            }
          }
          // Rewrite citation markers 【turn0searchN】 -> [N].
          result.thinking = result.thinking.replace(/【turn0search(\d+)】/g, '[$1]');
          result.answer = result.answer.replace(/【turn0search(\d+)】/g, '[$1]');
          resolve(result);
        })
        .on('error', (err) => {
          reject(err);
        });
    });
  }

  /** @returns {string[]} the public model names accepted by chat(). */
  getModel() {
    return Object.keys(this._model);
  }

  /**
   * Send a chat completion request.
   *
   * @param {string} question user prompt (required)
   * @param {object} [options]
   * @param {string} [options.model='glm-4.6'] one of getModel()
   * @param {string|null} [options.system_prompt=null] optional system message
   * @param {boolean} [options.search=false] enable web search
   * @param {boolean} [options.deepthink=false] enable thinking mode
   * @returns {Promise<{status: boolean, data?: object, msg?: string}>}
   *   never rejects — errors are returned as { status: false, msg, ... }.
   */
  async chat(question, { model = 'glm-4.6', system_prompt = null, search = false, deepthink = false } = {}) {
    try {
      if (!question) throw new Error('Question is required.');
      if (!this._model[model]) throw new Error(`Available models: ${this.getModel().join(', ')}.`);
      if (typeof search !== 'boolean') throw new Error('Search must be a boolean.');
      if (typeof deepthink !== 'boolean') throw new Error('Deepthink must be a boolean.');
      // Guest credentials: user id + bearer token + session cookies.
      const usr = await this.ins.get('/api/v1/auths/');
      const chat_id = crypto.randomUUID();
      const xx = this.sign(this.ins.defaults.baseURL, usr.data.id, usr.data.token, Buffer.from(this.t, 'base64').toString(), chat_id, question, this.ins.defaults.headers.common['user-agent']);
      const res = await this.ins.post('/api/chat/completions', {
        messages: [
          ...(system_prompt ? [{
            role: 'system',
            content: system_prompt
          }] : []),
          {
            role: 'user',
            content: question
          }
        ],
        signature_prompt: question,
        features: {
          web_search: search,
          auto_web_search: search,
          enable_thinking: deepthink
        },
        model: this._model[model],
        chat_id: chat_id,
        id: crypto.randomUUID(),
        stream: true
      }, {
        headers: {
          authorization: `Bearer ${usr.data.token}`,
          // Hardened: tolerate a missing set-cookie header instead of throwing.
          cookie: (usr.headers['set-cookie'] || []).join('; '),
          'x-signature': xx.signature,
          'x-fe-version': 'prod-fe-1.0.52'
        },
        params: xx.params,
      });
      const hasil = await this.parseStream(res.data);
      return {
        status: true,
        data: hasil
      };
    } catch (error) {
      return {
        status: false,
        msg: error.message,
        ...(error?.response?.data || error?.response || {})
      };
    }
  }
}
/**
 * HTTP GET handler: forwards `req.query.text` to Z.AI and replies with a
 * JSON envelope ({ author, success, ... }). Responds 400 when `text` is
 * missing and 500 on upstream or unexpected failure.
 */
const handler = async (req, res) => {
  try {
    const {
      text,
      model = 'glm-4.6',
      search = false,
      deepthink = false,
      system_prompt,
    } = req.query;

    // Guard: the prompt is mandatory.
    if (!text) {
      res.status(400).json({
        author: 'Herza',
        success: false,
        msg: 'Missing required parameter: text'
      });
      return;
    }

    // Query-string flags arrive as strings; accept 'true' or a real boolean.
    const toBool = (flag) => flag === 'true' || flag === true;

    const client = new ZAI();
    const outcome = await client.chat(text, {
      model,
      system_prompt,
      search: toBool(search),
      deepthink: toBool(deepthink)
    });

    if (!outcome.status) {
      res.status(500).json({
        author: 'Herza',
        success: false,
        msg: outcome.msg
      });
      return;
    }

    res.json({
      author: 'Herza',
      success: true,
      data: outcome.data
    });
  } catch (error) {
    res.status(500).json({
      author: 'Herza',
      success: false,
      msg: 'Terjadi kesalahan saat menghubungi AI.'
    });
  }
};
// Plugin manifest consumed by the DashX-API route loader.
// NOTE(review): field semantics inferred from their values — confirm the
// exact keys the loader reads (name/type/routes/handler at minimum).
module.exports = {
  name: 'Z.AI Chat',
  description: 'Generate responses using Z.AI with multiple GLM models',
  type: 'GET', // HTTP method for the route(s) below
  routes: ['api/AI/zai'], // mount path(s)
  tags: ['ai', 'zai', 'glm', 'chat'],
  parameters: ['text', 'model', 'search', 'deepthink', 'system_prompt'], // query params accepted by handler
  enabled: true,
  main: ['AI'], // presumably the plugin category/group — TODO confirm
  handler // request handler defined above
};