// ai-detector-pgx / example.js
// Uploaded by darwinkernelpanic with huggingface_hub (commit 00115f9, verified)
// AI Detector Example - JavaScript/Node.js
// Install: npm install @xenova/transformers onnxruntime-node
const { AutoTokenizer } = require('@xenova/transformers');
const ort = require('onnxruntime-node');
// Cached promises so repeated calls reuse the tokenizer and the ONNX
// session instead of reloading both from disk on every invocation.
let tokenizerPromise = null;
let sessionPromise = null;

/**
 * Classify a piece of text as AI-generated or human-written.
 *
 * @param {string} text - The text to classify.
 * @returns {Promise<{ai_probability: number, is_ai: boolean, confidence: number}>}
 *   ai_probability - softmax probability of the "AI" class (logit index 1),
 *   is_ai          - true when ai_probability > 0.5,
 *   confidence     - distance from the 0.5 decision boundary, scaled to [0, 1].
 */
async function detectAI(text) {
  // Kick off both loads once, in parallel, and await the shared promises.
  if (tokenizerPromise === null) {
    tokenizerPromise = AutoTokenizer.from_pretrained('darwinkernelpanic/ai-detector-pgx');
  }
  if (sessionPromise === null) {
    sessionPromise = ort.InferenceSession.create('./model.onnx');
  }
  const [tokenizer, session] = await Promise.all([tokenizerPromise, sessionPromise]);

  // Tokenize. NOTE(review): the original also passed `return_tensors: 'pt'`,
  // which is a Python transformers option, not a transformers.js one; dropped.
  const encoded = await tokenizer(text, {
    padding: true,
    truncation: true,
    max_length: 512,
  });

  // transformers.js tensors expose .data / .dims, which onnxruntime-node
  // accepts directly for int64 inputs.
  const inputIds = new ort.Tensor('int64', encoded.input_ids.data, encoded.input_ids.dims);
  const attentionMask = new ort.Tensor('int64', encoded.attention_mask.data, encoded.attention_mask.dims);

  // Run inference.
  const results = await session.run({
    input_ids: inputIds,
    attention_mask: attentionMask,
  });

  // Numerically stable 2-class softmax: subtract the max logit before
  // exponentiating so large logits cannot overflow Math.exp to Infinity
  // (which would make aiProb NaN).
  const logits = results.logits.data;
  const maxLogit = Math.max(logits[0], logits[1]);
  const exp0 = Math.exp(logits[0] - maxLogit);
  const exp1 = Math.exp(logits[1] - maxLogit);
  const aiProb = exp1 / (exp0 + exp1);

  return {
    ai_probability: aiProb,
    is_ai: aiProb > 0.5,
    confidence: Math.abs(aiProb - 0.5) * 2,
  };
}
// Run example — log the result, and surface failures (missing model file,
// network errors during tokenizer download, …) instead of leaving the
// promise rejection unhandled.
detectAI("The mitochondria is the powerhouse of the cell...")
  .then((r) => console.log('AI Probability:', (r.ai_probability * 100).toFixed(1) + '%'))
  .catch((err) => {
    console.error('Detection failed:', err);
    process.exitCode = 1;
  });

module.exports = { detectAI };