# Hugging Face Space app: Astronomy GPT-2 chatbot.
import gradio as gr
from transformers import GPT2Tokenizer, GPT2LMHeadModel
import torch
print("Loading model...")
# Load the fine-tuned astronomy GPT-2 checkpoint from the Hugging Face Hub.
# Runs at import time so the model is downloaded (and cached) before the UI serves requests.
MODEL_NAME = "Branis333/astro-gpt2-chatbot"
tokenizer = GPT2Tokenizer.from_pretrained(MODEL_NAME)
model = GPT2LMHeadModel.from_pretrained(MODEL_NAME)
# Set model to evaluation mode (disables dropout; inference only)
model.eval()
print("Model loaded successfully!")
def format_question(question):
    """Normalize a user question, appending '?' when it lacks terminal punctuation.

    Args:
        question (str): Raw user input.

    Returns:
        str: Whitespace-stripped question guaranteed to end in '?', '!', or '.'.
    """
    cleaned = question.strip()
    # Respect punctuation the user already typed; otherwise assume a question.
    return cleaned if cleaned.endswith(('?', '!', '.')) else f"{cleaned}?"
def answer_astronomy_question(question, max_length=150, temperature=0.7, top_p=0.9):
    """Generate an answer to an astronomy question with the fine-tuned GPT-2 model.

    Args:
        question (str): User question; a trailing '?' is added automatically if missing.
        max_length (int): Maximum number of NEW tokens to generate (passed as max_new_tokens).
        temperature (float): Sampling temperature — lower values give more focused answers.
        top_p (float): Nucleus-sampling probability mass.

    Returns:
        str: The generated answer text, or a prompt to enter a question if input is empty.
    """
    # Guard: an empty/whitespace question would otherwise become the degenerate
    # prompt "Q: ?\nA:" and the model would generate unrelated text.
    if not question or not question.strip():
        return "Please enter an astronomy question."

    # Build the Q/A prompt the model was fine-tuned on.
    formatted_question = format_question(question)
    prompt = f"Q: {formatted_question}\nA:"
    inputs = tokenizer(prompt, return_tensors="pt")

    # Sampling-based generation; repetition_penalty curbs GPT-2's tendency to loop.
    with torch.no_grad():
        outputs = model.generate(
            **inputs,
            max_new_tokens=max_length,
            temperature=temperature,
            top_p=top_p,
            do_sample=True,
            pad_token_id=tokenizer.eos_token_id,
            eos_token_id=tokenizer.eos_token_id,
            repetition_penalty=1.2,
        )

    generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)

    # The decoded text echoes the prompt; keep only what follows the first "A:".
    if "A:" in generated_text:
        return generated_text.split("A:", 1)[1].strip()
    return generated_text.strip()
# Example prompts as (question, max length, temperature, top-p) specs —
# Gradio expects one list per example, matching the 4 interface inputs.
_EXAMPLE_SPECS = [
    ("What is a black hole?", 150, 0.7, 0.9),
    ("What is a constellation?", 150, 0.7, 0.9),
    ("What causes auroras on Earth?", 150, 0.7, 0.9),
    ("Explain the difference between a planet and a star.", 200, 0.7, 0.9),
    ("What is the Big Bang theory?", 200, 0.8, 0.9),
]
examples = [list(spec) for spec in _EXAMPLE_SPECS]
# Gradio UI definition: one text input plus three generation-control sliders,
# all forwarded positionally to answer_astronomy_question.
interface = gr.Interface(
    fn=answer_astronomy_question,
    inputs=[
        # Free-text question; a trailing '?' is appended inside the handler.
        gr.Textbox(
            label="Ask an Astronomy Question",
            placeholder="e.g., What is a black hole (question mark is optional)",
            lines=2
        ),
        # Maps to max_length (used as max_new_tokens in generate()).
        gr.Slider(
            minimum=50,
            maximum=300,
            value=150,
            step=10,
            label="Max Answer Length"
        ),
        # Sampling temperature: lower = more focused, higher = more creative.
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.7,
            step=0.1,
            label="Temperature (creativity)"
        ),
        # Nucleus-sampling cutoff, passed through as top_p.
        gr.Slider(
            minimum=0.1,
            maximum=1.0,
            value=0.9,
            step=0.1,
            label="Top-p (diversity)"
        ),
    ],
    outputs=gr.Textbox(
        label="Answer",
        lines=8
    ),
    # Each example row supplies all 4 inputs in order.
    examples=examples,
    title="🌌 Astronomy GPT-2 Chatbot",
    description="""
Ask questions about astronomy and space science! This chatbot is powered by a fine-tuned GPT-2 model
trained on 2,736 astronomy Q&A pairs.
**Tip:** You don't need to add a question mark - it will be added automatically! ✨
**Note:** This is an educational tool. Always verify important astronomical facts with authoritative sources.
""",
    article="""
### About This Model
- **Base Model:** GPT-2
- **Training Data:** 2,736 cleaned astronomy Q&A pairs
- **Perplexity:** 1.61
- **Specialization:** Astronomy terminology, concepts, and phenomena
### Tips for Best Results:
- Ask specific, clear questions (question mark optional!)
- Lower temperature = more focused answers
- Higher temperature = more creative answers
### Model Repository
[View on Hugging Face](https://huggingface.co/Branis333/astro-gpt2-chatbot)
""",
    theme=gr.themes.Soft(),
)
# Launch the app only when run as a script (e.g. on a Hugging Face Space),
# not when this module is imported.
if __name__ == "__main__":
    interface.launch()