sachnun committed
Commit 1b6d346 · 1 Parent(s): 5ab36b4

Add dist/ to gitignore and remove from tracking


Build artifacts should not be committed, as they are generated during the Docker build.
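(To reproduce this kind of change locally, the usual sequence is "git rm -r --cached dist/" followed by a commit: the files stay on disk but drop out of tracking once dist/ is ignored.)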

Files changed (4):
  1. .gitignore +3 -0
  2. dist/index.js +0 -219
  3. dist/lib/db/index.js +0 -15
  4. dist/lib/db/schema.js +0 -46
.gitignore CHANGED
@@ -11,6 +11,9 @@
 # deps
 node_modules/
 
+# build output (TypeScript)
+dist/
+
 # env
 .env
 .env.production
dist/index.js DELETED
@@ -1,219 +0,0 @@
-import { serve } from '@hono/node-server';
-import { Hono } from 'hono';
-import { cors } from 'hono/cors';
-import { uploadFile } from '@huggingface/hub';
-import { createHash } from 'crypto';
-import { createDb } from './lib/db/index.js';
-import { user, session } from './lib/db/schema.js';
-import { eq } from 'drizzle-orm';
-import { createHash as createSha256 } from 'crypto';
-const app = new Hono();
-// Environment variables
-const HF_TOKEN = process.env.HF_TOKEN || '';
-const HF_DATASET_REPO = process.env.HF_DATASET_REPO || '';
-const UPLOAD_SERVER_TOKEN = process.env.UPLOAD_SERVER_TOKEN || '';
-const ALLOWED_ORIGIN = process.env.ALLOWED_ORIGIN || '*';
-const DATABASE_URL = process.env.DATABASE_URL || '';
-const DATABASE_AUTH_TOKEN = process.env.DATABASE_AUTH_TOKEN || '';
-// CORS configuration
-app.use('/*', cors({
-    origin: ALLOWED_ORIGIN,
-    allowMethods: ['POST', 'GET', 'OPTIONS'],
-    allowHeaders: ['Content-Type', 'Authorization'],
-    exposeHeaders: ['Content-Length'],
-    maxAge: 86400,
-    credentials: true,
-}));
-// Helper function to validate session token and get userId
-async function validateSessionToken(sessionToken) {
-    try {
-        // Hash the session token with SHA-256 (same as hugstream main app)
-        const sessionId = createSha256('sha256').update(sessionToken).digest('hex');
-        const db = createDb({
-            DATABASE_URL,
-            DATABASE_AUTH_TOKEN,
-            HF_TOKEN,
-            HF_DATASET_REPO
-        });
-        // Query session from database
-        const [sessionRecord] = await db
-            .select({
-                userId: session.userId,
-                expiresAt: session.expiresAt
-            })
-            .from(session)
-            .where(eq(session.id, sessionId))
-            .limit(1);
-        if (!sessionRecord) {
-            return null;
-        }
-        // Check if session is expired
-        if (new Date() >= new Date(sessionRecord.expiresAt)) {
-            return null;
-        }
-        return sessionRecord.userId;
-    }
-    catch (error) {
-        console.error('[AUTH] Session validation error:', error);
-        return null;
-    }
-}
-// Auth middleware
-const authMiddleware = async (c, next) => {
-    const authHeader = c.req.header('Authorization');
-    if (!authHeader || !authHeader.startsWith('Bearer ')) {
-        return c.json({ error: 'Unauthorized: Missing or invalid Authorization header' }, 401);
-    }
-    const token = authHeader.substring(7); // Remove "Bearer " prefix
-    if (token !== UPLOAD_SERVER_TOKEN) {
-        return c.json({ error: 'Unauthorized: Invalid token' }, 401);
-    }
-    await next();
-};
-// Health check endpoint
-app.get('/health', (c) => {
-    return c.json({
-        status: 'ok',
-        service: 'hugstream-upload',
-        timestamp: new Date().toISOString()
-    });
-});
-/**
- * Upload endpoint: POST /upload
- *
- * Receives file from hugstream main app and uploads to Hugging Face Dataset
- *
- * Request body (multipart/form-data):
- * - file: File to upload
- * - sessionToken: Session token from auth-session cookie (for authentication)
- * - hash: MD5 hash of the file (for deduplication)
- * - filename: Original filename
- *
- * Response:
- * - success: boolean
- * - hfPath: Path in HF Dataset where file was uploaded
- * - message: Status message
- */
-app.post('/upload', authMiddleware, async (c) => {
-    try {
-        // Validate HF configuration
-        if (!HF_TOKEN || !HF_DATASET_REPO) {
-            console.error('Missing HF_TOKEN or HF_DATASET_REPO environment variables');
-            return c.json({
-                error: 'Server configuration error: Missing HF credentials'
-            }, 500);
-        }
-        // Parse multipart form data
-        const formData = await c.req.formData();
-        const file = formData.get('file');
-        const sessionToken = formData.get('sessionToken');
-        const hash = formData.get('hash');
-        const filename = formData.get('filename');
-        // Validate required fields
-        if (!file) {
-            return c.json({ error: 'Missing file in request' }, 400);
-        }
-        if (!sessionToken) {
-            return c.json({ error: 'Missing sessionToken in request' }, 400);
-        }
-        if (!hash) {
-            return c.json({ error: 'Missing hash in request' }, 400);
-        }
-        if (!filename) {
-            return c.json({ error: 'Missing filename in request' }, 400);
-        }
-        // Validate session and get userId
-        const userId = await validateSessionToken(sessionToken);
-        if (!userId) {
-            console.error('[UPLOAD] Invalid or expired session token');
-            return c.json({ error: 'Unauthorized: Invalid or expired session' }, 401);
-        }
-        console.log(`[UPLOAD] Session validated for user ${userId}`);
-        console.log(`[UPLOAD] Starting upload, file: ${filename}, hash: ${hash}, size: ${file.size} bytes`);
-        // Convert file to buffer for hash verification
-        const arrayBuffer = await file.arrayBuffer();
-        const buffer = Buffer.from(arrayBuffer);
-        // Verify hash to ensure data integrity
-        const calculatedHash = createHash('md5').update(buffer).digest('hex');
-        if (calculatedHash !== hash) {
-            console.error(`[UPLOAD] Hash mismatch! Expected: ${hash}, Got: ${calculatedHash}`);
-            return c.json({
-                error: 'Hash verification failed: File may be corrupted'
-            }, 400);
-        }
-        // Build HF path using only hash (global deduplication)
-        const hfPath = hash;
-        console.log(`[UPLOAD] Hash verified. Uploading to HF path: ${hfPath}`);
-        // Upload to Hugging Face Dataset
-        try {
-            await uploadFile({
-                repo: {
-                    type: 'dataset',
-                    name: HF_DATASET_REPO
-                },
-                accessToken: HF_TOKEN,
-                file: {
-                    path: hfPath,
-                    content: new Blob([buffer])
-                }
-            });
-            console.log(`[UPLOAD] Successfully uploaded to Hugging Face: ${hfPath}`);
-            return c.json({
-                success: true,
-                hfPath,
-                message: 'File uploaded successfully',
-                size: buffer.length,
-                hash: calculatedHash
-            });
-        }
-        catch (hfError) {
-            console.error('[UPLOAD] Hugging Face upload failed:', hfError);
-            // Check if it's a conflict error (file already exists)
-            if (hfError.message && hfError.message.includes('conflict')) {
-                console.log(`[UPLOAD] File already exists in HF (deduplication): ${hfPath}`);
-                return c.json({
-                    success: true,
-                    hfPath,
-                    message: 'File already exists (deduplicated)',
-                    size: buffer.length,
-                    hash: calculatedHash,
-                    deduplicated: true
-                });
-            }
-            throw hfError;
-        }
-    }
-    catch (error) {
-        console.error('[UPLOAD] Upload error:', error);
-        return c.json({
-            error: 'Failed to upload file',
-            details: error.message || 'Unknown error'
-        }, 500);
-    }
-});
-/**
- * Chunked upload endpoint: POST /upload/chunked
- * For large files that need to be uploaded in chunks
- * (Future implementation - not used yet)
- */
-app.post('/upload/chunked', authMiddleware, async (c) => {
-    return c.json({
-        error: 'Chunked upload not implemented yet'
-    }, 501);
-});
-const port = parseInt(process.env.PORT || '7860');
-serve({
-    fetch: app.fetch,
-    port: port,
-    hostname: '0.0.0.0'
-}, (info) => {
-    console.log(`
-╔════════════════════════════════════════════╗
-║ Hugstream Upload Server ║
-║ Status: Running ║
-║ Port: ${info.port} ║
-║ HF Dataset: ${HF_DATASET_REPO ? '✓ Configured' : '✗ Not configured'} ║
-║ Auth Token: ${UPLOAD_SERVER_TOKEN ? '✓ Configured' : '✗ Not configured'} ║
-╚════════════════════════════════════════════╝
-`);
-});
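
Note: for reference, a minimal client sketch against the POST /upload contract documented above. UPLOAD_URL, SESSION_TOKEN, and UPLOAD_SERVER_TOKEN are placeholder environment variables (not part of this repo), and the snippet assumes Node 18+ for the global fetch, FormData, and Blob.

// Reference sketch only; placeholder env vars, Node 18+ globals assumed.
import { createHash } from 'crypto';
import { readFile } from 'fs/promises';

async function uploadToServer(path, filename) {
    const buffer = await readFile(path);
    // MD5 digest doubles as the server-side deduplication key
    const hash = createHash('md5').update(buffer).digest('hex');
    const form = new FormData();
    form.append('file', new Blob([buffer]), filename);
    form.append('sessionToken', process.env.SESSION_TOKEN || '');
    form.append('hash', hash);
    form.append('filename', filename);
    const res = await fetch(`${process.env.UPLOAD_URL}/upload`, {
        method: 'POST',
        headers: { Authorization: `Bearer ${process.env.UPLOAD_SERVER_TOKEN || ''}` },
        body: form,
    });
    // Expected shape: { success, hfPath, message, size, hash, deduplicated? }
    return res.json();
}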
dist/lib/db/index.js DELETED
@@ -1,15 +0,0 @@
-import { drizzle } from 'drizzle-orm/libsql';
-import * as schema from './schema.js';
-/**
- * Create database instance for Cloudflare Worker
- * This is called per-request in the worker context
- */
-export function createDb(env) {
-    return drizzle({
-        connection: {
-            url: env.DATABASE_URL,
-            authToken: env.DATABASE_AUTH_TOKEN
-        },
-        schema
-    });
-}
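
Note: dist/index.js above consumed this factory per request; a minimal usage sketch (index.js also passed HF_TOKEN and HF_DATASET_REPO, which createDb ignores):

// Usage sketch: construct a per-request Drizzle client as dist/index.js did.
import { createDb } from './lib/db/index.js';

const db = createDb({
    DATABASE_URL: process.env.DATABASE_URL || '',
    DATABASE_AUTH_TOKEN: process.env.DATABASE_AUTH_TOKEN || ''
});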
dist/lib/db/schema.js DELETED
@@ -1,46 +0,0 @@
-import { sqliteTable, integer, text, index } from 'drizzle-orm/sqlite-core';
-export const user = sqliteTable('user', {
-    id: text('id').primaryKey(),
-    age: integer('age'),
-    username: text('username').notNull().unique(),
-    passwordHash: text('password_hash').notNull(),
-    storageQuota: integer('storage_quota').notNull().default(10995116277760) // 10TB default quota
-});
-export const session = sqliteTable('session', {
-    id: text('id').primaryKey(),
-    userId: text('user_id').notNull().references(() => user.id),
-    expiresAt: integer('expires_at', { mode: 'timestamp' }).notNull()
-}, (table) => ({
-    userIdIdx: index('session_user_id_idx').on(table.userId),
-    expiresAtIdx: index('session_expires_at_idx').on(table.expiresAt)
-}));
-export const file = sqliteTable('file', {
-    id: text('id').primaryKey(),
-    name: text('name').notNull(),
-    path: text('path').notNull(),
-    size: integer('size').notNull(),
-    mimeType: text('mime_type').notNull(),
-    userId: text('user_id').notNull().references(() => user.id),
-    parentId: text('parent_id'),
-    isFolder: integer('is_folder', { mode: 'boolean' }).notNull().default(false),
-    deletedAt: integer('deleted_at', { mode: 'timestamp' }),
-    createdAt: integer('created_at', { mode: 'timestamp' }).notNull(),
-    updatedAt: integer('updated_at', { mode: 'timestamp' }).notNull(),
-    // Hugging Face storage fields
-    hash: text('hash'), // MD5 hash for deduplication
-    hfPath: text('hf_path'), // Path in HF Dataset: {userId}/{hash}/{filename}
-    isUploaded: integer('is_uploaded', { mode: 'boolean' }).default(false), // Upload status to HF
-    uploadedAt: integer('uploaded_at', { mode: 'timestamp' }) // When successfully uploaded to HF
-}, (table) => ({
-    userIdIdx: index('file_user_id_idx').on(table.userId),
-    parentIdIdx: index('file_parent_id_idx').on(table.parentId),
-    isFolderIdx: index('file_is_folder_idx').on(table.isFolder),
-    deletedAtIdx: index('file_deleted_at_idx').on(table.deletedAt),
-    nameIdx: index('file_name_idx').on(table.name),
-    userFolderIdx: index('file_user_folder_idx').on(table.userId, table.isFolder, table.deletedAt),
-    userParentDeletedIdx: index('file_user_parent_deleted_idx').on(table.userId, table.parentId, table.deletedAt),
-    hashIdx: index('file_hash_idx').on(table.hash),
-    userHashIdx: index('file_user_hash_idx').on(table.userId, table.hash),
-    uploadedHashIdx: index('file_uploaded_hash_idx').on(table.isUploaded, table.hash),
-    hfPathIdx: index('file_hf_path_idx').on(table.hfPath)
-}));
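
Note: a sketch of the hash-based deduplication lookup that file_uploaded_hash_idx is built for. The helper name is illustrative; db is a Drizzle instance as returned by createDb above.

// Illustrative helper: look up an already-uploaded file by content hash.
import { and, eq } from 'drizzle-orm';
import { file } from './lib/db/schema.js';

export async function findUploadedByHash(db, hash) {
    const [existing] = await db
        .select({ hfPath: file.hfPath })
        .from(file)
        .where(and(eq(file.isUploaded, true), eq(file.hash, hash)))
        .limit(1);
    return existing ? existing.hfPath : null;
}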