# NASA / app.py — Streamlit seismic-event demo
# (Hugging Face Space by mohAhmad, commit 5578f37)
import streamlit as st
import pandas as pd
import matplotlib.pyplot as plt
from transformers import AutoTokenizer, AutoModelForSeq2SeqLM
# Load and cache the model/tokenizer pair from the Hugging Face hub.
@st.cache_resource
def load_model():
    """Download the demo seq2seq checkpoint and return its components.

    Returns:
        tuple: ``(tokenizer, model)`` for the ``t5-small`` checkpoint.
        Streamlit caches the result, so the download and model
        construction happen only once per server process.
    """
    checkpoint = "t5-small"  # Small T5 model for demo
    tok = AutoTokenizer.from_pretrained(checkpoint)
    seq2seq = AutoModelForSeq2SeqLM.from_pretrained(checkpoint)
    return tok, seq2seq
tokenizer, model = load_model()

# --- Streamlit app -------------------------------------------------------
st.title("Seismic Event Prediction App")

# File upload section. The plotting step below expects the CSV to contain
# at least the columns 'x' (signal values) and 'prediction' (predicted
# event position) — validated before use.
uploaded_file = st.file_uploader("Upload CSV File", type=["csv"])
if uploaded_file is not None:
    # Load and display the uploaded CSV.
    data = pd.read_csv(uploaded_file)
    st.write("## Uploaded Data")
    st.dataframe(data)

    if data.empty:
        # Guard: an empty CSV would give the slider invalid bounds (0, -1).
        st.warning("The uploaded CSV contains no rows.")
    else:
        # Input slider for choosing an example (index between 0 and N-1).
        st.write("## Select an example to visualize:")
        if len(data) > 1:
            idx = st.slider("Choose an index", 0, len(data) - 1, 0)
        else:
            # st.slider requires min < max; with a single row the only
            # possible index is 0, so skip the slider entirely.
            idx = 0

        # Show the selected row.
        st.write("### Selected example:", idx)
        st.write(data.iloc[idx])

        # Plot the signal and mark the predicted event — but only when the
        # required columns exist, so a malformed CSV produces a readable
        # error instead of an unhandled KeyError.
        missing = [c for c in ("x", "prediction") if c not in data.columns]
        if missing:
            st.error(f"CSV is missing required column(s): {', '.join(missing)}")
        else:
            fig, ax = plt.subplots()
            ax.plot(data['x'], label="X-axis data", color="blue")
            ax.axvline(x=data.iloc[idx]['prediction'], color="red",
                       label="Predicted Earthquake")
            ax.legend()
            st.pyplot(fig)
            # Close the figure: Streamlit reruns this script on every
            # interaction, and unclosed figures accumulate in memory.
            plt.close(fig)

        # Use the Hugging Face model to generate a short text "prediction"
        # for the selected row. Demo only: t5-small is a generic seq2seq
        # model, not trained on seismic data, so the output is illustrative.
        input_text = f"Predict seismic event for index {idx}."
        inputs = tokenizer.encode(input_text, return_tensors="pt")
        outputs = model.generate(inputs, max_length=50, num_beams=4, early_stopping=True)
        generated_text = tokenizer.decode(outputs[0], skip_special_tokens=True)
        st.write("### Model Prediction:")
        st.write(generated_text)