import { Hono } from '@hono/hono';
import { HfInference } from '@huggingface/inference';
/**
 * `fetch` wrapper that injects a huggingface_hub-style User-Agent header so
 * outgoing requests mimic the official Python client.
 *
 * Fixes two bugs in the previous version:
 *  - The header was only added when `init.headers` was already truthy, so a
 *    call with an `init` that lacked headers never got the User-Agent.
 *  - `{ ...init.headers }` silently drops every entry when `init.headers` is
 *    a `Headers` instance (spreading a Headers object yields `{}`); merging
 *    through `new Headers(...)` handles Headers, tuple arrays, and records.
 * Also avoids mutating the caller's `init` object.
 *
 * @param input Request target, same as the global `fetch`.
 * @param init  Standard RequestInit plus a Deno HTTP client handle.
 * @returns The response from the underlying `fetch` call.
 */
async function _fetch(
  input: Request | URL | string,
  init?: RequestInit & {
    client: Deno.HttpClient;
  },
): Promise<Response> {
  if (init) {
    const headers = new Headers(init.headers);
    headers.set('User-Agent', 'unknown/None; hf_hub/0.25.2; python/3.10.16');
    // Shallow-copy so the caller's init is left untouched.
    init = { ...init, headers };
  }
  console.log(input, init);
  return await fetch(input, init);
}
// HTTP application: a single GET / route that relays a fixed chat-completion
// request to the Hugging Face Inference API, logs the raw reply, and answers
// the browser with a greeting plus a random UUID.
const app = new Hono();
// const client = new HfInference();

app.get('/', async (c) => {
  // Alternative path via the official client, kept for reference:
  // const chatCompletion = await client.chatCompletion({
  //   model: 'microsoft/Phi-3.5-mini-instruct',
  //   messages: [
  //     {
  //       role: 'user',
  //       content: 'What is the capital of France?',
  //     },
  //   ],
  //   provider: 'hf-inference',
  //   max_tokens: 500,
  // }, {
  //   fetch: _fetch as typeof fetch,
  // });
  // console.log(chatCompletion.choices[0].message);

  // Request body hoisted into a named constant; key order is unchanged so the
  // serialized JSON stays byte-identical to what was sent before.
  const payload = {
    'model': 'microsoft/Phi-3.5-mini-instruct',
    'messages': [
      {
        'role': 'user',
        'content': 'What is the capital of France?',
      },
    ],
    'max_tokens': 500,
    'stream': false,
  };
  const response = await fetch('https://api-inference.huggingface.co/v1/chat/completions', {
    method: 'POST',
    headers: {
      'content-type': 'application/json',
      'user-agent': 'unknown/None; hf_hub/0.25.2; python/3.10.16',
    },
    body: JSON.stringify(payload),
  });
  // Log the raw API reply; the response status is intentionally not checked
  // in this demo.
  console.log(await response.text());
  return c.text(`Hello Hono! ${crypto.randomUUID()}`);
});

// Bind the Hono app to Deno's built-in HTTP server.
Deno.serve({ port: 7860 }, app.fetch);