Update app.py
Browse files
app.py
CHANGED
|
@@ -72,9 +72,9 @@ except ValueError as e:
|
|
| 72 |
X_train_resampled, y_train_resampled = X_train_tfidf, y_train
|
| 73 |
|
| 74 |
# Logistic Regression Model
|
| 75 |
-
# max iter exceeding
|
| 76 |
-
# Don't set C low, set to 100+ default.
|
| 77 |
-
model = LogisticRegression(C=
|
| 78 |
model.fit(X_train_resampled, y_train_resampled)
|
| 79 |
|
| 80 |
# Evaluate Model
|
|
@@ -165,6 +165,7 @@ def analyze_tone(text, selected_tone=None):
|
|
| 165 |
# Gradio interface Creation
|
| 166 |
with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as demo:
|
| 167 |
gr.Markdown("# Text Tone Sentimental Analyzer")
|
|
|
|
| 168 |
|
| 169 |
with gr.Row():
|
| 170 |
with gr.Column(scale=3):
|
|
@@ -176,12 +177,13 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as demo:
|
|
| 176 |
analyze_button = gr.Button("Analyze Tone", variant="primary")
|
| 177 |
|
| 178 |
with gr.Column(scale=2):
|
| 179 |
-
# Example Tones Dropdown
|
| 180 |
tone_dropdown = gr.Dropdown(
|
| 181 |
choices=sorted(df['label'].unique().tolist()),
|
| 182 |
label="Select a tone to view an example below."
|
| 183 |
)
|
| 184 |
-
|
|
|
|
|
|
|
| 185 |
with gr.Row():
|
| 186 |
with gr.Column(scale=1):
|
| 187 |
result_message = gr.Markdown()
|
|
@@ -189,22 +191,22 @@ with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as demo:
|
|
| 189 |
with gr.Row():
|
| 190 |
with gr.Column(scale=2):
|
| 191 |
plot_output = gr.Plot(label="Tone Probabilities")
|
| 192 |
-
|
| 193 |
-
all_probs_output = gr.JSON(label="All Probabilities")
|
| 194 |
-
|
| 195 |
with gr.Row():
|
| 196 |
examples_output = gr.Dataframe(
|
| 197 |
headers=["Examples of similar texts"],
|
| 198 |
datatype=["str"],
|
| 199 |
label="Example texts with similar tone"
|
| 200 |
)
|
| 201 |
-
|
|
|
|
| 202 |
analyze_button.click(
|
| 203 |
fn=analyze_tone,
|
| 204 |
inputs=[text_input, tone_dropdown],
|
| 205 |
-
outputs=[result_message,
|
| 206 |
)
|
| 207 |
|
|
|
|
| 208 |
tone_dropdown.change(
|
| 209 |
fn=get_tone_examples,
|
| 210 |
inputs=tone_dropdown,
|
|
|
|
| 72 |
X_train_resampled, y_train_resampled = X_train_tfidf, y_train
|
| 73 |
|
| 74 |
# Logistic Regression Model
|
| 75 |
+
# max_iter exceeding 200 doesn't improve anything
|
| 76 |
+
# Don't set C low; set it to 100+ instead of the default. 200 works better.
|
| 77 |
+
model = LogisticRegression(C=1000, max_iter=200, n_jobs=-1, solver='lbfgs', multi_class='multinomial')
|
| 78 |
model.fit(X_train_resampled, y_train_resampled)
|
| 79 |
|
| 80 |
# Evaluate Model
|
|
|
|
| 165 |
# Gradio interface Creation
|
| 166 |
with gr.Blocks(theme=gr.themes.Soft(primary_hue="blue")) as demo:
|
| 167 |
gr.Markdown("# Text Tone Sentimental Analyzer")
|
| 168 |
+
gr.Markdown("Be mindful of punctuation as it affects results. Slang is unaccounted for due to dataset constraints.")
|
| 169 |
|
| 170 |
with gr.Row():
|
| 171 |
with gr.Column(scale=3):
|
|
|
|
| 177 |
analyze_button = gr.Button("Analyze Tone", variant="primary")
|
| 178 |
|
| 179 |
with gr.Column(scale=2):
|
|
|
|
| 180 |
tone_dropdown = gr.Dropdown(
|
| 181 |
choices=sorted(df['label'].unique().tolist()),
|
| 182 |
label="Select a tone to view an example below."
|
| 183 |
)
|
| 184 |
+
|
| 185 |
+
gr.Markdown("<br>", elem_id="line-break-1")
|
| 186 |
+
|
| 187 |
with gr.Row():
|
| 188 |
with gr.Column(scale=1):
|
| 189 |
result_message = gr.Markdown()
|
|
|
|
| 191 |
with gr.Row():
|
| 192 |
with gr.Column(scale=2):
|
| 193 |
plot_output = gr.Plot(label="Tone Probabilities")
|
| 194 |
+
|
|
|
|
|
|
|
| 195 |
with gr.Row():
|
| 196 |
examples_output = gr.Dataframe(
|
| 197 |
headers=["Examples of similar texts"],
|
| 198 |
datatype=["str"],
|
| 199 |
label="Example texts with similar tone"
|
| 200 |
)
|
| 201 |
+
|
| 202 |
+
# Button callback
|
| 203 |
analyze_button.click(
|
| 204 |
fn=analyze_tone,
|
| 205 |
inputs=[text_input, tone_dropdown],
|
| 206 |
+
outputs=[result_message, plot_output, examples_output]
|
| 207 |
)
|
| 208 |
|
| 209 |
+
# Dropdown change
|
| 210 |
tone_dropdown.change(
|
| 211 |
fn=get_tone_examples,
|
| 212 |
inputs=tone_dropdown,
|