Basu03 commited on
Commit
31d912a
·
1 Parent(s): f90498e

vertex ai minor bugs 3

Browse files
Files changed (2) hide show
  1. .DS_Store +0 -0
  2. app.py +85 -55
.DS_Store CHANGED
Binary files a/.DS_Store and b/.DS_Store differ
 
app.py CHANGED
@@ -5,13 +5,10 @@ from src.interview_logic import EXCEL_QUESTIONS # Import questions for state bui
5
  # Initialize the graph, which is stateless and operates on the state dict we provide.
6
  graph = build_graph()
7
 
8
- def chat_fn(message: str, history: list[list[str]]):
9
  """
10
- Main chat function for Gradio.
11
-
12
- THE FIX: This function now returns a single string for the bot's reply.
13
- Gradio's ChatInterface handles the history update automatically and reliably,
14
- which prevents the submit button from disappearing.
15
  """
16
  # 1. Convert Gradio's history (list of lists) into our graph's internal format (list of tuples)
17
  internal_history = []
@@ -19,32 +16,28 @@ def chat_fn(message: str, history: list[list[str]]):
19
  if user_msg:
20
  internal_history.append(("user", user_msg))
21
  if assistant_msg:
22
- # Split assistant messages that might contain multiple parts from previous turns
23
- # This is important for correctly reconstructing the state
24
  parts = assistant_msg.split("\n\n")
25
  for part in parts:
26
  internal_history.append(("ai", part))
27
-
28
- # Store the length of the history *before* adding the new message.
29
- # We will use this to find out what the bot's new reply is.
30
- len_before = len(internal_history)
31
 
32
- # Add the user's new message to the history for the graph to process
33
- internal_history.append(("user", message))
 
34
 
35
  # 2. Build the state dictionary to pass to the graph on every turn.
36
  question_count = sum(1 for role, content in internal_history if content in EXCEL_QUESTIONS)
37
 
38
  current_state = {
39
- "interview_status": 0 if not history else 1, # Status is 0 only if history is empty
40
  "interview_history": internal_history,
41
  "questions": EXCEL_QUESTIONS,
42
  "question_index": question_count,
43
- "evaluations": [], # This remains stateless for simplicity
44
  }
45
 
46
  # 3. Invoke the graph with the current state.
47
- print(f"Invoking graph with current state...")
48
  new_state = graph.invoke(current_state)
49
 
50
  # 4. Extract ONLY the new messages generated by the bot in this turn.
@@ -55,47 +48,84 @@ def chat_fn(message: str, history: list[list[str]]):
55
  return "\n\n".join(bot_responses)
56
 
57
 
58
- # Create the ChatInterface.
59
- demo = gr.ChatInterface(
60
- fn=chat_fn,
61
- title="🤖 AI-Powered Excel Interviewer (Phi-3 Mini)",
62
- description="An AI-powered interview system that asks Excel-related questions and provides feedback. Click one of the examples or type a message like 'start' to begin.",
63
- chatbot=gr.Chatbot(
64
- show_copy_button=True,
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
65
  height=600,
 
66
  placeholder="The interview will begin after you send your first message.",
67
  avatar_images=(None, "https://upload.wikimedia.org/wikipedia/commons/1/1d/Microsoft_Excel_2013-2019_logo.svg")
68
- ),
69
- textbox=gr.Textbox(
70
- placeholder="Type your answer here and press Enter...",
71
- label="Your Response",
72
- lines=3
73
- ),
74
- theme="soft",
75
- submit_btn="Submit Answer",
76
- examples=[
77
- "I'm ready to start the interview",
78
- "Let's begin",
79
- "Start the assessment"
80
- ],
81
- cache_examples=False
82
- )
83
 
84
- # Add custom CSS for better styling
85
- demo.css = """
86
- .chat-message {
87
- font-size: 16px;
88
- line-height: 1.5;
89
- }
90
- .gradio-container {
91
- max-width: 1200px;
92
- margin: 0 auto;
93
- }
94
- """
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
95
 
96
  if __name__ == "__main__":
97
- demo.launch(
98
- share=False,
99
- debug=True,
100
- show_error=True
101
- )
 
5
  # Initialize the graph, which is stateless and operates on the state dict we provide.
6
  graph = build_graph()
7
 
8
+ def run_graph_logic(message: str, history: list[list[str]]):
9
  """
10
+ This helper function contains the core logic for running the LangGraph chain.
11
+ It's separated from the UI code for clarity.
 
 
 
12
  """
13
  # 1. Convert Gradio's history (list of lists) into our graph's internal format (list of tuples)
14
  internal_history = []
 
16
  if user_msg:
17
  internal_history.append(("user", user_msg))
18
  if assistant_msg:
19
+ # Split combined messages back into individual parts
 
20
  parts = assistant_msg.split("\n\n")
21
  for part in parts:
22
  internal_history.append(("ai", part))
 
 
 
 
23
 
24
+ # NOTE: The user's new message is already in the history passed to this function.
25
+ # We find the length of the history *before* the bot adds its response.
26
+ len_before = len(internal_history)
27
 
28
  # 2. Build the state dictionary to pass to the graph on every turn.
29
  question_count = sum(1 for role, content in internal_history if content in EXCEL_QUESTIONS)
30
 
31
  current_state = {
32
+ "interview_status": 0 if len(history) <= 1 else 1, # Status is 0 only for the very first message
33
  "interview_history": internal_history,
34
  "questions": EXCEL_QUESTIONS,
35
  "question_index": question_count,
36
+ "evaluations": [],
37
  }
38
 
39
  # 3. Invoke the graph with the current state.
40
+ print("Invoking graph with current state...")
41
  new_state = graph.invoke(current_state)
42
 
43
  # 4. Extract ONLY the new messages generated by the bot in this turn.
 
48
  return "\n\n".join(bot_responses)
49
 
50
 
51
def user_sends_message(user_message: str, history: list[list[str]]):
    """
    Handle a submission from the textbox or the Submit button.

    The user's turn is appended to *history* first so the chat panel
    updates immediately, then the backend interview graph is invoked
    and its full reply is written into that same turn (no token-level
    streaming — the response arrives in one piece).

    Returns the updated history (fed back to the chatbot component)
    together with an empty string, which clears the input textbox.
    """
    # Show the user's message right away; the bot slot starts out empty.
    history.append([user_message, None])

    # Run the graph and fill in the bot side of the turn we just added.
    history[-1][1] = run_graph_logic(user_message, history)

    # Second return value resets the textbox contents.
    return history, ""
68
+
69
+
70
def clear_chat():
    """Reset the UI: empty chat history and a blank input textbox."""
    empty_history: list = []
    blank_textbox = ""
    return empty_history, blank_textbox
73
+
74
+
75
# --- MANUAL UI LAYOUT WITH GR.BLOCKS ---
# NOTE(review): reconstructed from a diff scrape — original indentation
# (including inside the Markdown string) was lost; confirm against the repo.
with gr.Blocks(theme="soft", css=".gradio-container {max-width: 1200px; margin: 0 auto;}") as demo:
    gr.Markdown(
        """
        # 🤖 AI-Powered Excel Interviewer (Phi-3 Mini)
        An AI-powered interview system that asks Excel-related questions and provides feedback.
        Click one of the examples or type a message like 'start' to begin.
        """
    )

    # Conversation panel; history is a list of [user, bot] pairs, which is
    # the format user_sends_message() mutates.
    chatbot = gr.Chatbot(
        label="Interview Conversation",
        height=600,
        show_copy_button=True,
        placeholder="The interview will begin after you send your first message.",
        avatar_images=(None, "https://upload.wikimedia.org/wikipedia/commons/1/1d/Microsoft_Excel_2013-2019_logo.svg")
    )

    # Create a row for the textbox and submit button
    with gr.Row():
        user_input = gr.Textbox(
            show_label=False,
            placeholder="Type your answer here and press Enter...",
            scale=5  # Make the textbox larger
        )
        submit_btn = gr.Button("Submit", variant="primary", scale=1)

    # Create a row for the examples and clear button
    with gr.Row():
        clear_btn = gr.Button("Clear and Restart Interview", variant="stop")
        gr.Examples(
            examples=["I'm ready to start the interview", "Let's begin", "Start the assessment"],
            inputs=user_input
        )

    # --- EVENT LISTENERS ---
    # Button click and textbox Enter are wired to the same handler so both
    # paths behave identically: update the chatbot and clear the textbox.
    submit_btn.click(
        fn=user_sends_message,
        inputs=[user_input, chatbot],
        outputs=[chatbot, user_input]  # Update chatbot and clear user_input
    )
    user_input.submit(
        fn=user_sends_message,
        inputs=[user_input, chatbot],
        outputs=[chatbot, user_input]
    )
    # Define what happens when the user clicks the clear button
    clear_btn.click(
        fn=clear_chat,
        inputs=None,
        outputs=[chatbot, user_input],
        queue=False  # Clearing should be instant
    )

if __name__ == "__main__":
    # debug=True surfaces tracebacks in the console; show_error=True surfaces
    # them in the UI as well.
    demo.launch(debug=True, show_error=True)