willco-afk committed on
Commit
8ef1729
·
verified ·
1 Parent(s): 92bb708

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +24 -252
app.py CHANGED
@@ -1,255 +1,27 @@
1
  import streamlit as st
2
- import pandas as pd
3
- import subprocess
4
- import time
5
- import random
6
  import numpy as np
7
  import tensorflow as tf
8
- from tensorflow.keras import layers, models
9
- from transformers import BertTokenizer, TFBertModel
10
- import requests
11
- import matplotlib.pyplot as plt
12
- from io import BytesIO
13
- import base64
14
-
15
- # ---------------------------- Helper Function for NER Data ----------------------------
16
-
17
- def generate_ner_data():
18
- # Sample NER data for different entities
19
- data_person = [{"text": f"Person example {i}", "entities": [{"entity": "Person", "value": f"Person {i}"}]} for i in range(1, 21)]
20
- data_organization = [{"text": f"Organization example {i}", "entities": [{"entity": "Organization", "value": f"Organization {i}"}]} for i in range(1, 21)]
21
- data_location = [{"text": f"Location example {i}", "entities": [{"entity": "Location", "value": f"Location {i}"}]} for i in range(1, 21)]
22
- data_date = [{"text": f"Date example {i}", "entities": [{"entity": "Date", "value": f"Date {i}"}]} for i in range(1, 21)]
23
- data_product = [{"text": f"Product example {i}", "entities": [{"entity": "Product", "value": f"Product {i}"}]} for i in range(1, 21)]
24
-
25
- # Create a dictionary of all NER examples
26
- ner_data = {
27
- "Person": data_person,
28
- "Organization": data_organization,
29
- "Location": data_location,
30
- "Date": data_date,
31
- "Product": data_product
32
- }
33
-
34
- return ner_data
35
-
36
- # ---------------------------- Fun NER Data Function ----------------------------
37
-
38
- def ner_demo():
39
- st.header("🤖 LLM NER Model Demo 🕵️‍♀️")
40
-
41
- # Generate NER data
42
- ner_data = generate_ner_data()
43
-
44
- # Pick a random entity type to display
45
- entity_type = random.choice(list(ner_data.keys()))
46
- st.subheader(f"Here comes the {entity_type} entity recognition, ready to show its magic! 🎩✨")
47
-
48
- # Select a random record to display
49
- example = random.choice(ner_data[entity_type])
50
- st.write(f"Analyzing: *{example['text']}*")
51
-
52
- # Display recognized entity
53
- for entity in example["entities"]:
54
- st.success(f"🔍 Found a {entity['entity']}: **{entity['value']}**")
55
-
56
- # A bit of rhyme to lighten up the task
57
- st.write("There once was an AI so bright, 🎇")
58
- st.write("It could spot any name in sight, 👁️")
59
- st.write("With a click or a tap, it put on its cap, 🎩")
60
- st.write("And found entities day or night! 🌙")
61
-
62
- # ---------------------------- Helper: Text Data Augmentation ----------------------------
63
-
64
- def word_subtraction(text):
65
- """Subtract words at random positions."""
66
- words = text.split()
67
- if len(words) > 2:
68
- index = random.randint(0, len(words) - 1)
69
- words.pop(index)
70
- return " ".join(words)
71
-
72
- def word_recombination(text):
73
- """Recombine words with random shuffling."""
74
- words = text.split()
75
- random.shuffle(words)
76
- return " ".join(words)
77
-
78
- # ---------------------------- ML Model Building ----------------------------
79
-
80
- def build_small_model(input_shape):
81
- model = models.Sequential()
82
- model.add(layers.Dense(64, activation='relu', input_shape=(input_shape,)))
83
- model.add(layers.Dense(32, activation='relu'))
84
- model.add(layers.Dense(1, activation='sigmoid'))
85
- model.compile(optimizer='adam', loss='binary_crossentropy', metrics=['accuracy'])
86
- return model
87
-
88
- # ---------------------------- TensorFlow and Keras Integration ----------------------------
89
-
90
- def train_model_demo():
91
- st.header("🧪 Let's Build a Mini TensorFlow Model 🎓")
92
-
93
- # Generate random synthetic data for simplicity
94
- data_size = 100
95
- X_train = np.random.rand(data_size, 10)
96
- y_train = np.random.randint(0, 2, size=data_size)
97
-
98
- st.write(f"🚀 **Data Shape**: {X_train.shape}, with binary target labels.")
99
-
100
- # Build the model
101
- model = build_small_model(X_train.shape[1])
102
-
103
- st.write("🔧 **Model Summary**:")
104
- st.text(model.summary())
105
-
106
- # Train the model
107
- st.write("🚀 **Training the model...**")
108
- history = model.fit(X_train, y_train, epochs=5, batch_size=16, verbose=0)
109
-
110
- # Output training results humorously
111
- st.success("🎉 Training completed! The model now knows its ABCs... or 1s and 0s at least! 😂")
112
-
113
- st.write(f"Final training loss: **{history.history['loss'][-1]:.4f}**, accuracy: **{history.history['accuracy'][-1]:.4f}**")
114
- st.write("Fun fact: This model can make predictions on binary outcomes like whether a cat will sleep or not. 🐱💤")
115
-
116
- # ---------------------------- Additional Useful Examples ----------------------------
117
-
118
- def code_snippet_sharing():
119
- st.header("📤 Code Snippet Sharing with Syntax Highlighting 🖥️")
120
-
121
- code = '''def hello_world():
122
- print("Hello, world!")'''
123
-
124
- st.code(code, language='python')
125
-
126
- st.write("Developers often need to share code snippets. Here's how you can display code with syntax highlighting in Streamlit! 🌈")
127
-
128
- def file_uploader_example():
129
- st.header("📁 File Uploader Example 📤")
130
-
131
- uploaded_file = st.file_uploader("Choose a CSV file", type="csv")
132
- if uploaded_file is not None:
133
- data = pd.read_csv(uploaded_file)
134
- st.write("🎉 File uploaded successfully!")
135
- st.dataframe(data.head())
136
- st.write("Use file uploaders to allow users to bring their own data into your app! 📊")
137
-
138
- def matplotlib_plot_example():
139
- st.header("📈 Matplotlib Plot Example 📊")
140
-
141
- # Generate data
142
- x = np.linspace(0, 10, 100)
143
- y = np.sin(x)
144
-
145
- # Create plot
146
- fig, ax = plt.subplots()
147
- ax.plot(x, y)
148
- ax.set_title('Sine Wave')
149
- st.pyplot(fig)
150
-
151
- st.write("You can integrate Matplotlib plots directly into your Streamlit app! 🎨")
152
-
153
- def cache_example():
154
- st.header("⚡ Streamlit Cache Example 🚀")
155
-
156
- @st.cache
157
- def expensive_computation(a, b):
158
- time.sleep(2)
159
- return a * b
160
-
161
- st.write("Let's compute something that takes time...")
162
- result = expensive_computation(2, 21)
163
- st.write(f"The result is {result}. But thanks to caching, it's faster the next time! ⚡")
164
-
165
- # ---------------------------- Display Tweet ----------------------------
166
-
167
- def display_tweet():
168
- st.header("🐦 Tweet Spotlight: TensorFlow and Transformers 🌟")
169
-
170
- tweet_html = '''
171
- <blockquote class="twitter-tweet">
172
- <p lang="en" dir="ltr">
173
- Just tried integrating TensorFlow with Transformers for my latest LLM project! 🚀
174
- The synergy between them is incredible. TensorFlow's flexibility combined with Transformers' power boosts Generative AI capabilities to new heights! 🔥 #TensorFlow #Transformers #AI #MachineLearning
175
- </p>&mdash; AI Enthusiast (@ai_enthusiast) <a href="https://twitter.com/ai_enthusiast/status/1234567890">September 30, 2024</a>
176
- </blockquote>
177
- <script async src="https://platform.twitter.com/widgets.js" charset="utf-8"></script>
178
- '''
179
-
180
- st.components.v1.html(tweet_html, height=300)
181
-
182
- st.write("Tweets can be embedded to showcase social proof or updates. Isn't that neat? 🐤")
183
-
184
- # ---------------------------- Header and Introduction ----------------------------
185
-
186
- st.set_page_config(page_title="LLMs and Tiny ML Models", page_icon="🤖", layout="wide", initial_sidebar_state="expanded")
187
- st.title("🤖📊 LLMs and Tiny ML Models with TensorFlow 📊🤖")
188
- st.markdown("This app demonstrates how to build small TensorFlow models, solve common developer problems, and augment text data using word subtraction and recombination strategies.")
189
- st.markdown("---")
190
-
191
- # ---------------------------- Main Navigation ----------------------------
192
-
193
- st.sidebar.title("Navigation")
194
- options = st.sidebar.radio("Go to", ['NER Demo', 'TensorFlow Model', 'Text Augmentation', 'Code Sharing', 'File Uploader', 'Matplotlib Plot', 'Streamlit Cache', 'Tweet Spotlight'])
195
-
196
- if options == 'NER Demo':
197
- if st.button('🧪 Run NER Model Demo'):
198
- ner_demo()
199
- else:
200
- st.write("Click the button above to start the AI NER magic! 🎩✨")
201
-
202
- elif options == 'TensorFlow Model':
203
- if st.button('🚀 Build and Train a TensorFlow Model'):
204
- train_model_demo()
205
-
206
- elif options == 'Text Augmentation':
207
- st.subheader("🎲 Fun Text Augmentation with Random Strategies 🎲")
208
- input_text = st.text_input("Enter a sentence to see some augmentation magic! ✨", "TensorFlow is awesome!")
209
- if st.button("Subtract Random Words"):
210
- st.write(f"Original: **{input_text}**")
211
- st.write(f"Augmented: **{word_subtraction(input_text)}**")
212
- if st.button("Recombine Words"):
213
- st.write(f"Original: **{input_text}**")
214
- st.write(f"Augmented: **{word_recombination(input_text)}**")
215
- st.write("Try both and see how the magic works! 🎩✨")
216
-
217
- elif options == 'Code Sharing':
218
- code_snippet_sharing()
219
-
220
- elif options == 'File Uploader':
221
- file_uploader_example()
222
-
223
- elif options == 'Matplotlib Plot':
224
- matplotlib_plot_example()
225
-
226
- elif options == 'Streamlit Cache':
227
- cache_example()
228
-
229
- elif options == 'Tweet Spotlight':
230
- display_tweet()
231
-
232
- st.markdown("---")
233
-
234
- # ---------------------------- Footer and Additional Resources ----------------------------
235
-
236
- st.subheader("📚 Additional Resources")
237
- st.markdown("""
238
- - [Official Streamlit Documentation](https://docs.streamlit.io/)
239
- - [TensorFlow Documentation](https://www.tensorflow.org/api_docs)
240
- - [Transformers Documentation](https://huggingface.co/docs/transformers/index)
241
- - [Streamlit Cheat Sheet](https://docs.streamlit.io/library/cheatsheet)
242
- - [Matplotlib Documentation](https://matplotlib.org/stable/contents.html)
243
- """)
244
-
245
- # ---------------------------- requirements.txt ----------------------------
246
- st.markdown('''
247
- Reference Libraries:
248
- plaintext
249
- streamlit
250
- pandas
251
- numpy
252
- tensorflow
253
- transformers
254
- matplotlib
255
- ''')
 
import streamlit as st
import numpy as np
import tensorflow as tf
from PIL import Image
import os

# ---------------------------------------------------------------------------
# Model loading
# ---------------------------------------------------------------------------

MODEL_PATH = "your_trained_model.keras"  # path to the trained Keras model


@st.cache_resource
def _load_model(path):
    """Load the Keras model once and reuse it across Streamlit reruns.

    Without caching, Streamlit re-executes this script on every widget
    interaction and would reload the model from disk each time, which is
    slow for any non-trivial model.
    """
    return tf.keras.models.load_model(path)


model = _load_model(MODEL_PATH)

# ---------------------------------------------------------------------------
# UI: image upload and prediction
# ---------------------------------------------------------------------------

st.title("Tree Decoration Prediction")
uploaded_image = st.file_uploader("Upload an image", type=["jpg", "png"])

if uploaded_image:
    # Force 3 RGB channels: PNG uploads may be RGBA or grayscale, which
    # would otherwise yield a (224, 224, 4) or (224, 224) array and break
    # the model's expected input shape.
    img = Image.open(uploaded_image).convert("RGB")
    # Resize to the model's input size.
    # NOTE(review): assumes the model was trained on 224x224 inputs — confirm.
    img = img.resize((224, 224))
    img_array = np.asarray(img, dtype=np.float32) / 255.0  # scale pixels to [0, 1]
    img_array = np.expand_dims(img_array, axis=0)  # add batch dim -> (1, 224, 224, 3)

    # Run inference. Presumably a single-sigmoid binary classifier whose
    # output is shaped (1, 1) — verify against the trained model.
    prediction = model.predict(img_array)

    st.image(uploaded_image, caption="Uploaded Image.", use_column_width=True)
    # Collapse the size-1 prediction array to a plain float so the 0.5
    # threshold comparison is an unambiguous scalar test.
    score = float(np.ravel(prediction)[0])
    st.write(f"Prediction: {'Decorated' if score > 0.5 else 'Undecorated'}")