Intellectualtech committed on
Commit
fb3e333
·
verified ·
1 Parent(s): cc151e0

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +9 -72
app.py CHANGED
@@ -1,11 +1,7 @@
1
- import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  from typing import List, Tuple
4
  import logging
5
- import PyPDF2
6
- import pytesseract
7
- from PIL import Image
8
- import io
9
 
10
  # Configure logging for better debugging and monitoring
11
  logging.basicConfig(
@@ -22,47 +18,8 @@ except Exception as e:
22
  logger.error(f"Failed to initialize InferenceClient: {str(e)}")
23
  raise
24
 
25
- def extract_text_from_pdf(pdf_file) -> str:
26
- """
27
- Extracts text from an uploaded PDF file.
28
- Args:
29
- pdf_file: Path to the uploaded PDF file or file-like object.
30
- Returns:
31
- str: Extracted text from the PDF.
32
- """
33
- try:
34
- text = ""
35
- with open(pdf_file, "rb") as file:
36
- reader = PyPDF2.PdfReader(file)
37
- for page in reader.pages:
38
- text += page.extract_text() or ""
39
- logger.info("Successfully extracted text from PDF")
40
- return text.strip()
41
- except Exception as e:
42
- logger.error(f"Error extracting text from PDF: {str(e)}")
43
- return ""
44
-
45
- def extract_text_from_image(image_file) -> str:
46
- """
47
- Extracts text from an uploaded image file using OCR.
48
- Args:
49
- image_file: Path to the uploaded image file or file-like object.
50
- Returns:
51
- str: Extracted text from the image.
52
- """
53
- try:
54
- image = Image.open(image_file)
55
- text = pytesseract.image_to_string(image)
56
- logger.info("Successfully extracted text from image")
57
- return text.strip()
58
- except Exception as e:
59
- logger.error(f"Error extracting text from image: {str(e)}")
60
- return ""
61
-
62
  def respond(
63
  message: str,
64
- pdf_file: str,
65
- image_file: str,
66
  history: List[Tuple[str, str]],
67
  system_message: str,
68
  max_tokens: int,
@@ -70,11 +27,9 @@ def respond(
70
  top_p: float,
71
  ) -> str:
72
  """
73
- Generates an educational response to a student's query, including text from uploaded PDFs or images.
74
  Args:
75
  message (str): The student's input question or query.
76
- pdf_file (str): Path to the uploaded PDF file.
77
- image_file (str): Path to the uploaded image file.
78
  history (List[Tuple[str, str]]): Chat history with student and AI teacher messages.
79
  system_message (str): The system prompt defining the AI teacher's behavior.
80
  max_tokens (int): Maximum number of tokens to generate.
@@ -87,8 +42,8 @@ def respond(
87
  RuntimeError: If the API call fails.
88
  """
89
  # Validate input parameters
90
- if not message.strip() and not pdf_file and not image_file:
91
- raise ValueError("At least one of message, PDF, or image must be provided")
92
  if max_tokens < 1 or max_tokens > 2048:
93
  raise ValueError("max_tokens must be between 1 and 2048")
94
  if temperature < 0.1 or temperature > 2.0:
@@ -96,20 +51,6 @@ def respond(
96
  if top_p < 0.1 or top_p > 1.0:
97
  raise ValueError("top_p must be between 0.1 and 1.0")
98
 
99
- # Combine text from message, PDF, and image
100
- combined_message = message.strip()
101
- if pdf_file:
102
- pdf_text = extract_text_from_pdf(pdf_file)
103
- if pdf_text:
104
- combined_message += "\n\n[From PDF]:\n" + pdf_text
105
- if image_file:
106
- image_text = extract_text_from_image(image_file)
107
- if image_text:
108
- combined_message += "\n\n[From Image]:\n" + image_text
109
-
110
- if not combined_message.strip():
111
- raise ValueError("No valid text extracted from inputs")
112
-
113
  # Construct the message history
114
  messages = [{"role": "system", "content": system_message}]
115
  for user_msg, assistant_msg in history:
@@ -117,7 +58,7 @@ def respond(
117
  messages.append({"role": "user", "content": user_msg})
118
  if assistant_msg:
119
  messages.append({"role": "assistant", "content": assistant_msg})
120
- messages.append({"role": "user", "content": combined_message})
121
 
122
  response = ""
123
  try:
@@ -137,23 +78,20 @@ def respond(
137
 
138
  def main():
139
  """
140
- Sets up and launches the Gradio ChatInterface for the AI Teacher chatbot with PDF and image upload support.
141
  """
142
  # Define default system message for an AI teacher
143
  default_system_message = (
144
  "You are an AI Teacher, a knowledgeable and patient educator dedicated to helping students and learners. "
145
  "Your goal is to explain concepts clearly, provide step-by-step guidance, and encourage critical thinking. "
146
  "Adapt your explanations to the learner's level, ask follow-up questions to deepen understanding, and provide examples where helpful. "
147
- "Be supportive, professional, and engaging in all interactions. "
148
- "If provided with text from uploaded PDFs or images, treat it as part of the student's question and respond accordingly."
149
  )
150
 
151
- # Create Gradio ChatInterface with file upload components
152
  demo = gr.ChatInterface(
153
  fn=respond,
154
  additional_inputs=[
155
- gr.File(label="Upload PDF", file_types=[".pdf"]),
156
- gr.File(label="Upload Image", file_types=[".png", ".jpg", ".jpeg"]),
157
  gr.Textbox(
158
  value=default_system_message,
159
  label="AI Teacher Prompt",
@@ -188,8 +126,7 @@ def main():
188
  title="AI Teacher: Your Study Companion",
189
  description=(
190
  "Welcome to AI Teacher, your personal guide for learning and studying! "
191
- "Ask questions by typing, or upload a PDF or image containing your question. "
192
- "I'll provide clear explanations, examples, and tips to help you succeed. "
193
  "Adjust the settings to customize how I respond to your questions."
194
  ),
195
  theme="soft",
 
1
+ import gradio as gr
2
  from huggingface_hub import InferenceClient
3
  from typing import List, Tuple
4
  import logging
 
 
 
 
5
 
6
  # Configure logging for better debugging and monitoring
7
  logging.basicConfig(
 
18
  logger.error(f"Failed to initialize InferenceClient: {str(e)}")
19
  raise
20
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
21
  def respond(
22
  message: str,
 
 
23
  history: List[Tuple[str, str]],
24
  system_message: str,
25
  max_tokens: int,
 
27
  top_p: float,
28
  ) -> str:
29
  """
30
+ Generates an educational response to a student's query using the HuggingFace Inference API.
31
  Args:
32
  message (str): The student's input question or query.
 
 
33
  history (List[Tuple[str, str]]): Chat history with student and AI teacher messages.
34
  system_message (str): The system prompt defining the AI teacher's behavior.
35
  max_tokens (int): Maximum number of tokens to generate.
 
42
  RuntimeError: If the API call fails.
43
  """
44
  # Validate input parameters
45
+ if not message.strip():
46
+ raise ValueError("Input message cannot be empty")
47
  if max_tokens < 1 or max_tokens > 2048:
48
  raise ValueError("max_tokens must be between 1 and 2048")
49
  if temperature < 0.1 or temperature > 2.0:
 
51
  if top_p < 0.1 or top_p > 1.0:
52
  raise ValueError("top_p must be between 0.1 and 1.0")
53
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
54
  # Construct the message history
55
  messages = [{"role": "system", "content": system_message}]
56
  for user_msg, assistant_msg in history:
 
58
  messages.append({"role": "user", "content": user_msg})
59
  if assistant_msg:
60
  messages.append({"role": "assistant", "content": assistant_msg})
61
+ messages.append({"role": "user", "content": message})
62
 
63
  response = ""
64
  try:
 
78
 
79
  def main():
80
  """
81
+ Sets up and launches the Gradio ChatInterface for the AI Teacher chatbot.
82
  """
83
  # Define default system message for an AI teacher
84
  default_system_message = (
85
  "You are an AI Teacher, a knowledgeable and patient educator dedicated to helping students and learners. "
86
  "Your goal is to explain concepts clearly, provide step-by-step guidance, and encourage critical thinking. "
87
  "Adapt your explanations to the learner's level, ask follow-up questions to deepen understanding, and provide examples where helpful. "
88
+ "Be supportive, professional, and engaging in all interactions."
 
89
  )
90
 
91
+ # Create Gradio ChatInterface with settings compatible with older Gradio versions
92
  demo = gr.ChatInterface(
93
  fn=respond,
94
  additional_inputs=[
 
 
95
  gr.Textbox(
96
  value=default_system_message,
97
  label="AI Teacher Prompt",
 
126
  title="AI Teacher: Your Study Companion",
127
  description=(
128
  "Welcome to AI Teacher, your personal guide for learning and studying! "
129
+ "Ask questions about any subject, and I'll provide clear explanations, examples, and tips to help you succeed. "
 
130
  "Adjust the settings to customize how I respond to your questions."
131
  ),
132
  theme="soft",