Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -5,7 +5,7 @@ import torch
|
|
| 5 |
# Load the pretrained GPT-2 tokenizer and language model once at module
# import, so every call to generate_response() reuses the same objects.
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")
|
| 7 |
|
| 8 |
-
#
|
| 9 |
course_info = {
|
| 10 |
"Engineering": ["Civil Engineering", "Mechanical Engineering", "Electrical Engineering", "Software Engineering", "etc."],
|
| 11 |
"Information Technology": ["Computer Science", "Information Systems", "Cybersecurity", "Data Science", "etc."],
|
|
@@ -21,29 +21,27 @@ course_info = {
|
|
| 21 |
|
| 22 |
# Function to generate response
|
| 23 |
def generate_response(user_input):
    """Reply to *user_input* with GPT-2, priming it with the course list.

    The flattened course catalogue is appended to the user's text so the
    model sees it as context, then up to 100 tokens are generated and
    decoded back to plain text.
    """
    # NOTE(review): no separator is inserted between the user's text and
    # the first course name — confirm this concatenation is intentional.
    catalogue = " ".join(
        course
        for courses in course_info.values()
        for course in courses
    )
    prompt = user_input + catalogue
    # Tokenize the combined prompt for the model.
    token_ids = tokenizer.encode(prompt, return_tensors="pt")
    # pad_token_id is passed explicitly because GPT-2 has no pad token.
    generated = model.generate(
        token_ids,
        max_length=100,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,
    )
    # Decode the single generated sequence back to a plain string.
    return tokenizer.decode(generated[0], skip_special_tokens=True)
|
| 35 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 36 |
# Main function to interact with the chatbot
|
| 37 |
def chat():
    """Run the interactive chatbot loop until the user asks to quit.

    NOTE(review): reconstructed from a corrupted diff — the original body
    had an unterminated string literal, a bare ``if`` with no condition,
    and never printed the generated reply.
    """
    print("Welcome to the Course Chatbot!")
    while True:
        user_input = input("You: ")
        # Let the user leave the session explicitly instead of killing it.
        if user_input.strip().lower() in {"quit", "exit", "bye"}:
            break
        else:
            bot_response = generate_response(user_input)
            print("Bot:", bot_response)
|
| 46 |
-
|
| 47 |
|
| 48 |
# Start the chatbot only when executed as a script, not on import.
if __name__ == "__main__":
    chat()
|
|
|
|
| 5 |
# Load the pretrained GPT-2 tokenizer and language model once at module
# import, so every call to generate_response() reuses the same objects.
tokenizer = GPT2Tokenizer.from_pretrained("gpt2")
model = GPT2LMHeadModel.from_pretrained("gpt2")
|
| 7 |
|
| 8 |
+
# Define course information
|
| 9 |
course_info = {
|
| 10 |
"Engineering": ["Civil Engineering", "Mechanical Engineering", "Electrical Engineering", "Software Engineering", "etc."],
|
| 11 |
"Information Technology": ["Computer Science", "Information Systems", "Cybersecurity", "Data Science", "etc."],
|
|
|
|
| 21 |
|
| 22 |
# Function to generate response
|
| 23 |
def generate_response(user_input):
    """Generate a free-form GPT-2 reply for *user_input*.

    The prompt is tokenized, extended by the language model with up to
    100 tokens, and decoded back to plain text.
    """
    encoded_prompt = tokenizer.encode(user_input, return_tensors="pt")
    # pad_token_id is passed explicitly because GPT-2 has no pad token.
    generated = model.generate(
        encoded_prompt,
        max_length=100,
        num_return_sequences=1,
        pad_token_id=tokenizer.eos_token_id,
    )
    return tokenizer.decode(generated[0], skip_special_tokens=True)
|
| 27 |
|
| 28 |
+
# Function to list courses based on category
|
| 29 |
+
def list_courses(category):
    """Describe the courses offered under *category*.

    Returns an apology string when *category* is not an exact key of the
    module-level ``course_info`` mapping.
    """
    # Guard clause: unknown categories get the fallback message.
    if category not in course_info:
        return "Sorry, I couldn't find any information on that category."
    offered = ", ".join(course_info[category])
    return f"{category} courses include: {offered}"
|
| 34 |
+
|
| 35 |
# Main function to interact with the chatbot
|
| 36 |
def chat():
    """Interactive chatbot loop.

    Known course categories are answered from ``course_info`` via
    ``list_courses``; anything else falls through to GPT-2 generation.
    """
    print("Welcome to the Course Chatbot! Feel free to ask me anything.")
    while True:
        user_input = input("You: ")
        # Robustness: give the user a way out — the original loop could
        # only be terminated by killing the process.
        if user_input.strip().lower() in {"quit", "exit"}:
            break
        # Bug fix: the original passed the raw user input to list_courses(),
        # which does an exact dict lookup on course_info and therefore always
        # returned the "Sorry..." fallback. Pass the matched category instead.
        matched = next((c for c in course_info if c in user_input), None)
        if matched is not None:
            bot_response = list_courses(matched)
        else:
            bot_response = generate_response(user_input)
        print("Bot:", bot_response)
|
| 45 |
|
| 46 |
# Start the chatbot only when executed as a script, not on import.
if __name__ == "__main__":
    chat()
|