admin08077 committed on
Commit
5143a4d
·
verified ·
1 Parent(s): 8537452

Create app.py

Browse files
Files changed (1) hide show
  1. app.py +38 -0
app.py ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ import base64
3
+ import hashlib
4
+ import sqlite3
5
+
6
# Initialize the token store: create the SQLite database file and the
# `tokens` table on startup if they don't already exist.
conn = sqlite3.connect('tokens.db')
try:
    # conn.execute is the sqlite3 shortcut that creates a cursor implicitly.
    conn.execute('''CREATE TABLE IF NOT EXISTS tokens
             (token TEXT)''')
    conn.commit()
finally:
    # Close even if table creation raises, so the DB file isn't left locked.
    conn.close()
13
+
14
def tokenize_file(file):
    """Tokenize an uploaded file into SHA3-256 chunk hashes.

    The file's raw bytes are Base64-encoded, the encoded text is split
    into 40-character chunks, and each chunk is hashed with SHA3-256.
    Every hash is persisted to the `tokens` table in tokens.db and the
    full list of hex digests is returned (empty list for an empty file).

    Args:
        file: a binary file-like object supporting ``.read()``
              (Gradio's uploaded-file object — TODO confirm exact type
              against the installed Gradio version).

    Returns:
        list[str]: hex digests, one per 40-character Base64 chunk.
    """
    # Base64 encode the file so chunking operates on stable ASCII text.
    encoded_file = base64.b64encode(file.read()).decode('utf-8')

    # Split the encoded file into 40-character chunks (last may be shorter).
    chunks = [encoded_file[i:i + 40] for i in range(0, len(encoded_file), 40)]

    # NOTE: hashlib.sha3_256 is standardized SHA3-256, not the pre-standard
    # Keccak-256 used by Ethereum — the two produce different digests.
    tokens = [hashlib.sha3_256(chunk.encode()).hexdigest() for chunk in chunks]

    # Persist all tokens over ONE connection in ONE transaction, instead of
    # reopening/committing/closing the database for every single chunk.
    conn = sqlite3.connect('tokens.db')
    try:
        # Defensive: make the function self-sufficient if startup setup
        # didn't run (behavior-compatible; table normally already exists).
        conn.execute('CREATE TABLE IF NOT EXISTS tokens (token TEXT)')
        conn.executemany('INSERT INTO tokens (token) VALUES (?)',
                         ((t,) for t in tokens))
        conn.commit()
    finally:
        # Always release the connection, even if an insert fails.
        conn.close()

    return tokens
34
+
35
# Gradio UI: a file-upload input feeding tokenize_file, with the resulting
# token list rendered in a textbox.
# NOTE(review): the original used gr.inputs.File / gr.outputs.Textbox — those
# namespaces were removed in Gradio 3.x, so this script crashes with
# AttributeError on any current install. The top-level components below are
# the supported equivalents.
file_input = gr.File(label="Upload a file")
output_text = gr.Textbox(label="Tokens")

# Launches the web server at import time (blocking), matching the original.
gr.Interface(fn=tokenize_file, inputs=file_input, outputs=output_text).launch()