Update app.py
Browse files
app.py
CHANGED
|
@@ -2,7 +2,14 @@ import streamlit as st
|
|
| 2 |
|
| 3 |
# Function to generate ML blog content
|
| 4 |
def generate_ml_blog():
|
| 5 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
## Introduction to Machine Learning (ML)
|
| 7 |
Machine Learning (ML) is a subset of Artificial Intelligence (AI) that enables computers to learn from data and make predictions or decisions without being explicitly programmed. It has revolutionized many industries and plays a crucial role in technologies such as self-driving cars, recommendation systems, and facial recognition.
|
| 8 |
|
|
@@ -40,16 +47,185 @@ def generate_ml_blog():
|
|
| 40 |
Machine learning continues to evolve, with new algorithms, techniques, and applications emerging regularly. As the amount of data grows and computational power increases, the potential of ML to impact industries and improve our daily lives is limitless.
|
| 41 |
'''
|
| 42 |
|
| 43 |
-
return
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 44 |
|
| 45 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 46 |
|
| 47 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 48 |
|
| 49 |
-
|
|
|
|
|
|
|
|
|
|
| 50 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 51 |
st.markdown(generate_ml_blog())
|
| 52 |
|
|
|
|
| 53 |
# Display interactive elements if needed
|
| 54 |
st.sidebar.header("📚 Contents")
|
| 55 |
st.sidebar.markdown("""
|
|
@@ -67,4 +243,41 @@ st.sidebar.markdown("""
|
|
| 67 |
- 🧠 [Neural Networks](#Neural-Networks)
|
| 68 |
""")
|
| 69 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 70 |
|
|
|
|
| 2 |
|
| 3 |
# Function to generate ML blog content
def generate_ml_blog(image_path=r"C:\Users\user\Downloads\A_detailed_and_professional_illustration_represent.jpg"):
    """Render the ML overview header image and return the Streamlit element.

    Args:
        image_path: Path to the header image. Defaults to the original
            hard-coded location; callers may pass another path to override.

    Returns:
        The object returned by ``st.image`` (a Streamlit DeltaGenerator).
    """
    # NOTE(review): the default path is machine-specific — consider shipping the
    # image with the app and defaulting to a relative path instead.
    image = Image.open(image_path)
    # NOTE(review): use_column_width is deprecated in newer Streamlit releases
    # (use_container_width replaces it); kept as-is to avoid changing behavior
    # on the currently pinned version.
    blog = st.image(image, caption="Machine Learning Overview", use_column_width=True)
    return blog
|
| 10 |
+
|
| 11 |
+
def introduction_to_ml()():
|
| 12 |
+
introduction_blog = '''
|
| 13 |
## Introduction to Machine Learning (ML)
|
| 14 |
Machine Learning (ML) is a subset of Artificial Intelligence (AI) that enables computers to learn from data and make predictions or decisions without being explicitly programmed. It has revolutionized many industries and plays a crucial role in technologies such as self-driving cars, recommendation systems, and facial recognition.
|
| 15 |
|
|
|
|
| 47 |
Machine learning continues to evolve, with new algorithms, techniques, and applications emerging regularly. As the amount of data grows and computational power increases, the potential of ML to impact industries and improve our daily lives is limitless.
|
| 48 |
'''
|
| 49 |
|
| 50 |
+
return introduction_blog
|
| 51 |
+
def supervised_learning():
    """Return the markdown blurb for the Supervised Learning section."""
    return '''
### Supervised Learning
Supervised learning algorithms learn from labeled data. The model is trained using a dataset where the input data and the correct output are both provided. The goal is to learn a mapping from inputs to outputs.

**Example**:
- **Linear Regression**: Used to predict a continuous value, such as predicting house prices.
```python
from sklearn.linear_model import LinearRegression
X = [[1], [2], [3], [4], [5]] # Features
y = [1, 2, 2.5, 4, 5] # Target
model = LinearRegression()
model.fit(X, y)
predictions = model.predict([[6]]) # Predict for 6 hours of study
```
'''
|
| 68 |
|
| 69 |
+
def unsupervised_learning():
    """Return the markdown blurb for the Unsupervised Learning section."""
    return '''
### Unsupervised Learning
In unsupervised learning, the algorithm is given data without any labeled outputs. The goal is to find hidden patterns or groupings in the data. Examples include clustering (e.g., K-means) and dimensionality reduction techniques (e.g., PCA).

**Example**:
- **K-Means Clustering**: Grouping data points into clusters based on similarity.
```python
from sklearn.cluster import KMeans
X = [[1, 2], [1.5, 1.8], [5, 8], [8, 8], [1, 0.6], [9, 11]]
kmeans = KMeans(n_clusters=2)
kmeans.fit(X)
labels = kmeans.predict(X)
```
'''
|
| 85 |
|
| 86 |
+
def reinforcement_learning():
    """Return the markdown blurb for the Reinforcement Learning section."""
    return '''
### Reinforcement Learning
Reinforcement learning involves an agent that learns to make decisions by interacting with an environment to maximize a cumulative reward. It is widely used in robotics, game AI, and real-time decision-making systems.

**Example**:
- **Q-Learning**: A reinforcement learning algorithm where an agent learns to maximize rewards by updating Q-values.
```python
import numpy as np
Q = np.zeros((5, 5)) # Example Q-table for 5 states and 5 actions
alpha = 0.1 # Learning rate
gamma = 0.9 # Discount factor
reward = 10
state = 0
action = 1
next_state = 1
Q[state, action] = Q[state, action] + alpha * (reward + gamma * np.max(Q[next_state]) - Q[state, action])
```
'''
|
| 106 |
+
def linear_regression():
    """Return the markdown blurb for the Linear Regression section."""
    return '''
### Linear Regression
Linear regression is used to predict a continuous value based on one or more input features. It finds the best-fit line to minimize the error between the predicted and actual values.

**Example**:
- **Predicting House Prices**: Predict the price of a house based on its features such as size and location.
```python
from sklearn.linear_model import LinearRegression
X = [[1], [2], [3], [4], [5]] # Features (e.g., years of experience)
y = [1, 2, 2.5, 4, 5] # Target (e.g., salary)
model = LinearRegression()
model.fit(X, y)
predictions = model.predict([[6]]) # Predict for 6 years of experience
```
'''
|
| 123 |
|
| 124 |
+
def logistic_regression():
    """Return the markdown blurb for the Logistic Regression section."""
    return '''
### Logistic Regression
Logistic regression is used for binary classification tasks, where the goal is to predict one of two outcomes, such as pass/fail or spam/not spam.

**Example**:
- **Predicting Spam Emails**: Classifying emails as spam or not spam.
```python
from sklearn.linear_model import LogisticRegression
from sklearn.datasets import load_iris
data = load_iris()
X = data.data
y = (data.target == 0).astype(int) # Binary classification (class 0 vs others)
model = LogisticRegression()
model.fit(X, y)
predictions = model.predict(X)
```
'''
|
| 143 |
+
|
| 144 |
+
def decision_trees():
    """Return the markdown blurb for the Decision Trees section."""
    return '''
### Decision Trees
Decision trees split the data into subsets based on feature values, creating a tree-like model. It is used for both classification and regression tasks.

**Example**:
- **Classifying Iris Species**: A decision tree can be used to classify different species of Iris flowers.
```python
from sklearn.tree import DecisionTreeClassifier
from sklearn.datasets import load_iris
data = load_iris()
X = data.data
y = data.target
model = DecisionTreeClassifier()
model.fit(X, y)
predictions = model.predict(X)
```
'''
|
| 163 |
+
|
| 164 |
+
def knn():
    """Return the markdown blurb for the K-Nearest Neighbors section."""
    # Local renamed from `knn` — it previously shadowed the function's own name.
    knn_section = '''
### K-Nearest Neighbors (KNN)
KNN is a simple, non-parametric algorithm that classifies data based on the majority vote of its nearest neighbors.

**Example**:
- **Classifying a Data Point**: Predict the class of a data point based on its nearest neighbors.
```python
from sklearn.neighbors import KNeighborsClassifier
from sklearn.datasets import load_iris
data = load_iris()
X = data.data
y = data.target
model = KNeighborsClassifier(n_neighbors=3)
model.fit(X, y)
predictions = model.predict(X)
```
'''
    return knn_section
|
| 183 |
+
|
| 184 |
+
def svm():
    """Return the markdown blurb for the Support Vector Machines section."""
    # Local renamed from `svm` — it previously shadowed the function's own name.
    svm_section = '''
### Support Vector Machines (SVM)
SVM is a powerful classifier that works well for high-dimensional data. It tries to find the hyperplane that best separates the data points of different classes.

**Example**:
- **Classifying Iris Flowers**: An SVM can be used to classify Iris flowers into different species.
```python
from sklearn.svm import SVC
from sklearn.datasets import load_iris
data = load_iris()
X = data.data
y = data.target
model = SVC(kernel='linear')
model.fit(X, y)
predictions = model.predict(X)
```
'''
    return svm_section
|
| 203 |
+
|
| 204 |
+
def neural_networks():
    """Return the markdown blurb for the Neural Networks section."""
    return '''
### Neural Networks
Neural networks are modeled after the human brain, with layers of interconnected nodes (neurons) used for tasks like image and speech recognition.

**Example**:
- **Classifying Handwritten Digits**: A simple neural network can be used to classify digits from the MNIST dataset.
```python
from sklearn.neural_network import MLPClassifier
from sklearn.datasets import load_iris
data = load_iris()
X = data.data
y = data.target
model = MLPClassifier(hidden_layer_sizes=(10,), max_iter=1000)
model.fit(X, y)
predictions = model.predict(X)
```
'''
|
| 223 |
+
|
| 224 |
+
|
| 225 |
+
# Streamlit UI Components
# generate_ml_blog() renders the image itself via st.image and returns the
# resulting Streamlit element — passing that element to st.markdown (as the
# previous code did) would render its repr, not markdown. Call it directly.
generate_ml_blog()
|
| 227 |
|
| 228 |
+
|
| 229 |
# Display interactive elements if needed
|
| 230 |
st.sidebar.header("📚 Contents")
|
| 231 |
st.sidebar.markdown("""
|
|
|
|
| 243 |
- 🧠 [Neural Networks](#Neural-Networks)
|
| 244 |
""")
|
| 245 |
|
| 246 |
+
# Display content based on the sidebar selection

page = st.sidebar.radio("Select Section", [
    "Introduction",
    "Supervised Learning",
    "Unsupervised Learning",
    "Reinforcement Learning",
    "Linear Regression",
    "Logistic Regression",
    "Decision Trees",
    "K-Nearest Neighbors (KNN)",
    "Support Vector Machines (SVM)",
    "Neural Networks"
])

# Map each section label to the function producing its markdown.
# A dispatch table replaces the previous long if/elif chain; behavior is
# identical for every selectable value.
_SECTIONS = {
    "Supervised Learning": supervised_learning,
    "Unsupervised Learning": unsupervised_learning,
    "Reinforcement Learning": reinforcement_learning,
    "Linear Regression": linear_regression,
    "Logistic Regression": logistic_regression,
    "Decision Trees": decision_trees,
    "K-Nearest Neighbors (KNN)": knn,
    "Support Vector Machines (SVM)": svm,
    "Neural Networks": neural_networks,
}

if page == "Introduction":
    # The introduction gets extra styled headers before its body text.
    st.markdown("<h1 style='text-align: center; color: orange;'>Machine Learning (ML)</h1>", unsafe_allow_html=True)
    st.markdown("<h2 style='text-align: center; color: orange;'>Introduction</h2>", unsafe_allow_html=True)
    st.markdown(introduction_to_ml())
else:
    st.markdown(_SECTIONS[page]())
|
| 283 |
|