DOMMETI committed on
Commit
36f5d04
·
verified ·
1 Parent(s): d267671

Update pages/15_SVM.py

Browse files
Files changed (1) hide show
  1. pages/15_SVM.py +5 -3
pages/15_SVM.py CHANGED
@@ -81,7 +81,7 @@ with st.expander("📐 Mathematical Formulation"):
81
  st.latex(r"y_i (w^T x_i + b) \geq 1 - \xi_i")
82
 
83
  st.markdown(r"### Slack Variable \( \xi_i \) Interpretation:")
84
- st.write("""
85
  - \( \xi_i = 0 \): Correct and outside the margin
86
  - \( 0 < \xi_i \leq 1 \): Inside the margin, but correctly classified
87
  - \( \xi_i > 1 \): Misclassified
@@ -105,13 +105,16 @@ with st.expander("✅ Pros & Cons of SVM"):
105
  # Dual Form & Kernel Trick
106
  with st.expander("🔄 Dual Form & Kernel Trick"):
107
  st.markdown(r"""
 
 
108
  ### Common Kernels:
109
  - **Linear Kernel**: \( K(x, x') = x^T x' \)
110
  - **Polynomial Kernel**: \( K(x, x') = (x^T x' + c)^d \)
111
  - **RBF (Gaussian)**: \( K(x, x') = \exp(-\gamma \|x - x'\|^2) \)
112
  - **Sigmoid Kernel**: Mimics activation of neural networks
113
- """)
114
 
 
 
115
 
116
  # Hyperparameters
117
  with st.expander("⚙️ Hyperparameter Tuning"):
@@ -130,4 +133,3 @@ with st.expander("⚙️ Hyperparameter Tuning"):
130
  # Outro
131
  st.markdown("---")
132
  st.success("SVMs are powerful and flexible. Mastering margins, kernels, and regularization is key to using them effectively!")
133
-
 
81
  st.latex(r"y_i (w^T x_i + b) \geq 1 - \xi_i")
82
 
83
  st.markdown(r"### Slack Variable \( \xi_i \) Interpretation:")
84
+ st.write(r"""
85
  - \( \xi_i = 0 \): Correct and outside the margin
86
  - \( 0 < \xi_i \leq 1 \): Inside the margin, but correctly classified
87
  - \( \xi_i > 1 \): Misclassified
 
105
  # Dual Form & Kernel Trick
106
  with st.expander("🔄 Dual Form & Kernel Trick"):
107
  st.markdown(r"""
108
+ When data is not linearly separable in its original space, we use the **kernel trick** to transform it.
109
+
110
  ### Common Kernels:
111
  - **Linear Kernel**: \( K(x, x') = x^T x' \)
112
  - **Polynomial Kernel**: \( K(x, x') = (x^T x' + c)^d \)
113
  - **RBF (Gaussian)**: \( K(x, x') = \exp(-\gamma \|x - x'\|^2) \)
114
  - **Sigmoid Kernel**: Mimics activation of neural networks
 
115
 
116
+ ✅ The kernel trick allows working in higher dimensions **without explicitly transforming** the data.
117
+ """)
118
 
119
  # Hyperparameters
120
  with st.expander("⚙️ Hyperparameter Tuning"):
 
133
  # Outro
134
  st.markdown("---")
135
  st.success("SVMs are powerful and flexible. Mastering margins, kernels, and regularization is key to using them effectively!")