Spaces:
Runtime error
Runtime error
Update app.py
Browse files
app.py
CHANGED
|
@@ -3,6 +3,15 @@ import gradio as gr
|
|
| 3 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM, BitsAndBytesConfig
|
| 4 |
import torch
|
| 5 |
import os
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 6 |
|
| 7 |
# Set up device
|
| 8 |
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
|
|
@@ -200,23 +209,13 @@ def end_interview(state):
|
|
| 200 |
return state["conversation"], state
|
| 201 |
|
| 202 |
def clear_state():
|
| 203 |
-
return [],
|
| 204 |
-
"name": "",
|
| 205 |
-
"domain": "",
|
| 206 |
-
"company": "",
|
| 207 |
-
"level": "",
|
| 208 |
-
"asked_questions": [],
|
| 209 |
-
"asked_subtopics": [],
|
| 210 |
-
"conversation": [],
|
| 211 |
-
"evaluations": [],
|
| 212 |
-
"interview_active": False
|
| 213 |
-
}
|
| 214 |
|
| 215 |
# ===============================
|
| 216 |
# Gradio UI
|
| 217 |
# ===============================
|
| 218 |
with gr.Blocks() as demo:
|
| 219 |
-
gr.Markdown("# 🧠 AI Mock Interview with Evaluation
|
| 220 |
|
| 221 |
with gr.Row():
|
| 222 |
name_input = gr.Textbox(label="Your Name")
|
|
@@ -247,4 +246,4 @@ with gr.Blocks() as demo:
|
|
| 247 |
exit_button.click(end_interview, inputs=state, outputs=[chatbot, state])
|
| 248 |
clear_button.click(clear_state, outputs=[chatbot, state])
|
| 249 |
|
| 250 |
-
demo.launch()
|
|
|
|
| 3 |
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM, AutoModelForCausalLM, BitsAndBytesConfig
|
| 4 |
import torch
|
| 5 |
import os
|
| 6 |
+
import gradio_client.utils as client_utils

# Workaround for a Gradio schema-generation issue: boolean JSON schemas
# (`True` = "anything allowed", `False` = "nothing allowed") crash
# `_json_schema_to_python_type`, so intercept them before delegating.
#
# FIX: capture the original function BEFORE rebinding the attribute.
# The previous patch called `client_utils._json_schema_to_python_type`
# inside its own body — after the assignment below that attribute IS the
# patch, so every non-bool schema recursed forever (RecursionError on
# startup).
_original_json_schema_to_python_type = client_utils._json_schema_to_python_type


def _patched_json_schema_to_python_type(schema, defs=None):
    """Map a JSON schema to a Python type string, tolerating bool schemas.

    Args:
        schema: A JSON-schema fragment; may legally be a bare bool.
        defs: Optional shared `$defs` mapping, passed through unchanged.

    Returns:
        A Python type string as produced by gradio_client, with bool
        schemas mapped to "Any" (True) or "None" (False).
    """
    if isinstance(schema, bool):
        return "Any" if schema else "None"
    # Delegate to the saved original, NOT the (now-patched) module attribute.
    return _original_json_schema_to_python_type(schema, defs)


client_utils._json_schema_to_python_type = _patched_json_schema_to_python_type
|
| 15 |
|
| 16 |
# Set up device: run on the GPU when CUDA is present, otherwise on the CPU.
device = torch.device("cuda") if torch.cuda.is_available() else torch.device("cpu")
|
|
|
|
| 209 |
return state["conversation"], state
|
| 210 |
|
| 211 |
def clear_state():
    """Reset the UI: an empty chat history plus a fresh interview state.

    Returns:
        A 2-tuple of (chatbot messages, state dict) — the empty list clears
        the chatbot component, and `reset_state` rebuilds the default
        session state (blank name/domain/company, "Entry-Level" level).
    """
    fresh_state = reset_state("", "", "", "Entry-Level")
    return [], fresh_state
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 213 |
|
| 214 |
# ===============================
|
| 215 |
# Gradio UI
|
| 216 |
# ===============================
|
| 217 |
with gr.Blocks() as demo:
|
| 218 |
+
gr.Markdown("# 🧠 AI Mock Interview with Evaluation")
|
| 219 |
|
| 220 |
with gr.Row():
|
| 221 |
name_input = gr.Textbox(label="Your Name")
|
|
|
|
| 246 |
exit_button.click(end_interview, inputs=state, outputs=[chatbot, state])
|
| 247 |
clear_button.click(clear_state, outputs=[chatbot, state])
|
| 248 |
|
| 249 |
+
# FIX: `Blocks.launch()` has no `ssr` keyword — passing `ssr=False` raises
# TypeError at startup (a likely cause of the Space's "Runtime error").
# The flag that disables server-side rendering is `ssr_mode`.
demo.launch(ssr_mode=False)
|