// Hugging Face Spaces status residue (was: "Spaces: Sleeping Sleeping") — not code.
// Express server exposing a small LLM chat API backed by node-llama-cpp.
const express = require('express');
const { LlamaModel, LlamaContext, LlamaChatSession } = require('node-llama-cpp');
const path = require('path');

const app = express();
const port = 7860;

// body-parser is deprecated as a separate dependency; Express >= 4.16
// ships the same JSON body parsing built in.
app.use(express.json());
// GET /api/test — echoes the query-string parameters back to the caller,
// useful for verifying the server is reachable and parsing requests.
app.get('/api/test', async (req, res) => {
  const { query } = req;
  const payload = {
    message: "Test getApiResponse GET success!",
    method: "GET",
    reqData: query,
  };
  res.json(payload);
});
// Model is loaded lazily on first use and reused afterwards: constructing
// LlamaModel loads the whole GGUF file, which is far too expensive to do
// per request (the original re-loaded it on every POST).
let sharedModel = null;

// POST /api/test — runs the caller's `userInput` prompt through the local
// orca-mini model and returns { aiAnswer }.
// Errors: 400 when userInput is missing/not a string; 500 on model failure.
app.post('/api/test', async (req, res) => {
  const reqData = req.body;
  const userInput = reqData?.userInput;

  // Validate before touching the model: the original passed `undefined`
  // straight into session.prompt() when the field was absent.
  if (typeof userInput !== 'string' || userInput.length === 0) {
    return res.status(400).json({ error: 'userInput (non-empty string) is required' });
  }

  try {
    sharedModel ??= new LlamaModel({
      modelPath: path.join(
        process.cwd(),
        "model",
        "orca-mini-3b-gguf2-q4_0.gguf"
      ),
    });
    // A fresh context/session per request keeps chat history isolated
    // between callers while still sharing the loaded model weights.
    const context = new LlamaContext({ model: sharedModel });
    const session = new LlamaChatSession({ context });

    const aiAnswer = await session.prompt(userInput);
    console.log(reqData);
    console.log(aiAnswer);
    res.json({ aiAnswer });
  } catch (err) {
    // Without this, a failed prompt() was an unhandled rejection and the
    // HTTP request hung with no response.
    console.error(err);
    res.status(500).json({ error: 'Failed to generate answer' });
  }
});
// Start accepting connections; log once the socket is bound.
app.listen(port, function onListening() {
  console.log(`Express server is running on port ${port}`);
});