Set Gemma 3B as default model and improve response display
app.py CHANGED
@@ -19,8 +19,8 @@ AVAILABLE_MODELS = {
 # Initialize the social graph manager
 social_graph = SocialGraphManager("social_graph.json")

-# Initialize the suggestion generator with
-suggestion_generator = SuggestionGenerator("
+# Initialize the suggestion generator with Gemma 3B (default)
+suggestion_generator = SuggestionGenerator("google/gemma-3-1b-it")

 # Test the model to make sure it's working
 test_result = suggestion_generator.test_model()
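The constructor default above only lines up with the UI if it matches a key in AVAILABLE_MODELS, since the dropdown further down is populated from those keys. A minimal startup sanity check, assuming AVAILABLE_MODELS is keyed by model id as the dropdown change implies (illustration only, not part of this commit):

assert "google/gemma-3-1b-it" in AVAILABLE_MODELS, (
    "Default model id is not listed in AVAILABLE_MODELS"
)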
@@ -140,7 +140,7 @@ def generate_suggestions(
     user_input,
     suggestion_type,
     selected_topic=None,
-    model_name="
+    model_name="google/gemma-3-1b-it",
     temperature=0.7,
     progress=gr.Progress(),
 ):
@@ -298,10 +298,16 @@ def generate_suggestions(
         result = "No suggestions available. Please try a different option."

     print(f"Returning result: {result[:100]}...")
+    print(f"Result type: {type(result)}")
+    print(f"Result length: {len(result)}")

     # Complete the progress
     progress(1.0, desc="Completed!")

+    # Make sure we're returning a non-empty string
+    if not result or len(result.strip()) == 0:
+        result = "No response was generated. Please try again with different settings."
+
     return result

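The new empty-result guard is small enough to exercise on its own. A sketch of the same check pulled into a helper, where the name ensure_non_empty is hypothetical and not something defined in app.py:

def ensure_non_empty(result: str) -> str:
    """Return result unchanged, or a fallback message if it is empty or whitespace-only."""
    if not result or len(result.strip()) == 0:
        return "No response was generated. Please try again with different settings."
    return result

# Quick self-checks mirroring the guard's intended behaviour.
assert ensure_non_empty("Hello") == "Hello"
assert ensure_non_empty("").startswith("No response")
assert ensure_non_empty("   ").startswith("No response")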
@@ -405,7 +411,7 @@ with gr.Blocks(title="Will's AAC Communication Aid") as demo:
         with gr.Row():
             model_dropdown = gr.Dropdown(
                 choices=list(AVAILABLE_MODELS.keys()),
-                value="
+                value="google/gemma-3-1b-it",
                 label="Language Model",
                 info="Select which AI model to use for generating responses",
             )
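The same model id now appears in three places: the SuggestionGenerator constructor, the model_name default of generate_suggestions, and the dropdown value above. One way to keep them from drifting apart, sketched with the hypothetical name DEFAULT_MODEL_ID (not part of this commit):

DEFAULT_MODEL_ID = "google/gemma-3-1b-it"

# The three call sites in the diff would then read, for example:
#   suggestion_generator = SuggestionGenerator(DEFAULT_MODEL_ID)
#   def generate_suggestions(..., model_name=DEFAULT_MODEL_ID, ...): ...
#   model_dropdown = gr.Dropdown(..., value=DEFAULT_MODEL_ID, ...)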
@@ -442,6 +448,7 @@ with gr.Blocks(title="Will's AAC Communication Aid") as demo:
             suggestions_output = gr.Markdown(
                 label="My Suggested Responses",
                 value="Suggested responses will appear here...",
+                elem_id="suggestions_output",  # Add an ID for easier debugging
             )

             # Set up event handlers
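The elem_id added above becomes the HTML id of the rendered component, so it can be inspected in the browser console or targeted with CSS passed to gr.Blocks. A self-contained sketch of that debugging use; the debug_css value and the dashed border are assumptions for illustration, not part of app.py:

import gradio as gr

# Outline the suggestions area while debugging so it is obvious where output lands.
debug_css = """
#suggestions_output { border: 1px dashed red; min-height: 4em; }
"""

with gr.Blocks(css=debug_css) as demo:
    suggestions_output = gr.Markdown(
        value="Suggested responses will appear here...",
        elem_id="suggestions_output",
    )

if __name__ == "__main__":
    demo.launch()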