Spaces:
Sleeping
Sleeping
T1ckbase
committed on
Commit
·
79f7ac0
1
Parent(s):
f9fbc16
image
Browse files- main.ts +66 -9
- utils/string.ts +2 -2
main.ts
CHANGED
|
@@ -1,8 +1,10 @@
|
|
| 1 |
import { Hono } from '@hono/hono';
|
| 2 |
import { logger } from '@hono/hono/logger';
|
|
|
|
| 3 |
import { generateImage as fluxGenerateImage } from './gradio-api/flux.ts';
|
| 4 |
import { parseResolution } from './utils/string.ts';
|
| 5 |
import OpenAI from '@openai/openai';
|
|
|
|
| 6 |
|
| 7 |
// https://api-inference.huggingface.co/v1
|
| 8 |
const HF_API_URL = 'https://api-inference.huggingface.co';
|
|
@@ -11,6 +13,8 @@ const JINA_API_URL = 'https://deepsearch.jina.ai';
|
|
| 11 |
const app = new Hono();
|
| 12 |
|
| 13 |
app.use(logger());
|
|
|
|
|
|
|
| 14 |
app.get('/', (c) => c.text('Hello Hono!'));
|
| 15 |
|
| 16 |
// LM Studio
|
|
@@ -62,18 +66,71 @@ app.post('/v1/chat/completions', async (c) => {
|
|
| 62 |
});
|
| 63 |
|
| 64 |
app.post('/v1/images/generations', async (c) => {
|
| 65 |
-
const
|
| 66 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 67 |
|
| 68 |
-
|
| 69 |
-
|
| 70 |
-
|
| 71 |
-
|
| 72 |
-
|
| 73 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 74 |
}
|
| 75 |
|
| 76 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 77 |
});
|
| 78 |
|
| 79 |
// Deno.serve({ port: 7860 }, app.fetch);
|
|
|
|
| 1 |
import { Hono } from '@hono/hono';
|
| 2 |
import { logger } from '@hono/hono/logger';
|
| 3 |
+
import { serveStatic } from '@hono/hono/deno';
|
| 4 |
import { generateImage as fluxGenerateImage } from './gradio-api/flux.ts';
|
| 5 |
import { parseResolution } from './utils/string.ts';
|
| 6 |
import OpenAI from '@openai/openai';
|
| 7 |
+
import { encodeBase64 } from '@std/encoding/base64';
|
| 8 |
|
| 9 |
// https://api-inference.huggingface.co/v1
|
| 10 |
const HF_API_URL = 'https://api-inference.huggingface.co';
|
|
|
|
| 13 |
const app = new Hono();
|
| 14 |
|
| 15 |
app.use(logger());
|
| 16 |
+
app.use('/tmp/*', serveStatic({ root: './tmp' }));
|
| 17 |
+
|
| 18 |
app.get('/', (c) => c.text('Hello Hono!'));
|
| 19 |
|
| 20 |
// LM Studio
|
|
|
|
| 66 |
});
|
| 67 |
|
| 68 |
app.post('/v1/images/generations', async (c) => {
|
| 69 |
+
const headers = new Headers(c.req.raw.headers);
|
| 70 |
+
headers.delete('Authorization');
|
| 71 |
+
headers.get('x-use-cache') || headers.set('x-use-cache', 'false');
|
| 72 |
+
console.log('headers:', Object.fromEntries(headers));
|
| 73 |
+
|
| 74 |
+
const params = await c.req.json<OpenAI.ImageGenerateParams>();
|
| 75 |
+
console.log('request body:', params);
|
| 76 |
+
|
| 77 |
+
const { pathname, search } = new URL(c.req.url);
|
| 78 |
+
const targetUrl = `${HF_API_URL}${pathname}${search}/models/${params.model}`;
|
| 79 |
+
|
| 80 |
+
const { width = 1024, height = 1024 } = parseResolution(params.size as string);
|
| 81 |
+
|
| 82 |
+
const requestBody: any = {
|
| 83 |
+
input: params.prompt,
|
| 84 |
+
parameters: {
|
| 85 |
+
width,
|
| 86 |
+
height,
|
| 87 |
+
},
|
| 88 |
+
};
|
| 89 |
|
| 90 |
+
headers.has('guidance_scale') && (requestBody.parameters.guidance_scale = parseFloat(headers.get('guidance_scale')!));
|
| 91 |
+
headers.has('negative_prompt') && (requestBody.parameters.negative_prompt = headers.get('negative_prompt'));
|
| 92 |
+
headers.has('num_inference_steps') && (requestBody.parameters.num_inference_steps = parseInt(headers.get('num_inference_steps')!));
|
| 93 |
+
headers.has('scheduler') && (requestBody.parameters.scheduler = headers.get('scheduler'));
|
| 94 |
+
headers.has('seed') && (requestBody.parameters.seed = parseInt(headers.get('seed')!));
|
| 95 |
+
|
| 96 |
+
const response = await fetch(targetUrl, {
|
| 97 |
+
method: 'POST',
|
| 98 |
+
headers: headers,
|
| 99 |
+
body: JSON.stringify(requestBody),
|
| 100 |
+
});
|
| 101 |
+
if (!response.ok) return response;
|
| 102 |
+
|
| 103 |
+
const ext = response.headers.get('content-type')!.substring('image/'.length).toLowerCase();
|
| 104 |
+
const image = await response.arrayBuffer();
|
| 105 |
+
const fileName = `${crypto.randomUUID()}.${ext}`;
|
| 106 |
+
|
| 107 |
+
await Deno.writeFile(`./tmp/${fileName}`, new Uint8Array(image));
|
| 108 |
+
|
| 109 |
+
let data: any = {
|
| 110 |
+
url: `https://t1ckbase-susface-api.hf.space/tmp/${fileName}`,
|
| 111 |
+
};
|
| 112 |
+
if (params.response_format === 'b64_json') {
|
| 113 |
+
data = {
|
| 114 |
+
b64_json: encodeBase64(image),
|
| 115 |
+
};
|
| 116 |
}
|
| 117 |
|
| 118 |
+
const resposne = {
|
| 119 |
+
created: Math.floor(Date.now() / 1000),
|
| 120 |
+
data: [data],
|
| 121 |
+
};
|
| 122 |
+
|
| 123 |
+
return new Response(JSON.stringify(resposne));
|
| 124 |
+
|
| 125 |
+
// switch (body.model) {
|
| 126 |
+
// case 'flux-dev': {
|
| 127 |
+
// return await fluxGenerateImage(body);
|
| 128 |
+
// }
|
| 129 |
+
// default:
|
| 130 |
+
// return c.text('unknown model', 400);
|
| 131 |
+
// }
|
| 132 |
+
|
| 133 |
+
// return c.text('skibidi', 400);
|
| 134 |
});
|
| 135 |
|
| 136 |
// Deno.serve({ port: 7860 }, app.fetch);
|
utils/string.ts
CHANGED
|
@@ -1,11 +1,11 @@
|
|
| 1 |
export function parseResolution(resolutionString?: string) {
|
| 2 |
-
if (!resolutionString) return { width:
|
| 3 |
const parts = resolutionString.split('x');
|
| 4 |
if (parts.length === 2) {
|
| 5 |
const width = parseInt(parts[0], 10);
|
| 6 |
const height = parseInt(parts[1], 10);
|
| 7 |
return { width, height };
|
| 8 |
} else {
|
| 9 |
-
return { width:
|
| 10 |
}
|
| 11 |
}
|
|
|
|
| 1 |
export function parseResolution(resolutionString?: string) {
|
| 2 |
+
if (!resolutionString) return { width: undefined, height: undefined };
|
| 3 |
const parts = resolutionString.split('x');
|
| 4 |
if (parts.length === 2) {
|
| 5 |
const width = parseInt(parts[0], 10);
|
| 6 |
const height = parseInt(parts[1], 10);
|
| 7 |
return { width, height };
|
| 8 |
} else {
|
| 9 |
+
return { width: undefined, height: undefined };
|
| 10 |
}
|
| 11 |
}
|