Spaces:
Build error
Build error
File size: 1,839 Bytes
c6135fd 77fa791 6aecff3 57dc880 6aecff3 57dc880 f3c9265 77fa791 c6135fd 77fa791 c6135fd 77fa791 c6135fd 77fa791 c6135fd 77fa791 57dc880 c6135fd f3c9265 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 |
import streamlit as st
from transformers import pipeline, AutoTokenizer, AutoModelForSequenceClassification, AutoConfig
import os
# Ensure compatibility with protobuf
# Force the pure-Python protobuf backend; avoids crashes when the compiled
# C++ backend is version-mismatched with the installed transformers stack.
os.environ["PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION"] = "python"
# Path to your model directory
# Local directory containing the fine-tuned MBTI classifier (config,
# tokenizer files, and model weights in HF `save_pretrained` layout).
model_path = "./mbti_model_2"
# Load model and tokenizer with label mappings
@st.cache_resource
def load_pipeline_and_mapping():
    """Load the text-classification pipeline and the label mapping once.

    Cached via ``st.cache_resource`` so the model is loaded a single time
    per server process, not on every Streamlit rerun.

    Returns:
        tuple: ``(pipe, label_to_mbti)`` where ``pipe`` is a transformers
        ``text-classification`` pipeline (or ``None`` on failure) and
        ``label_to_mbti`` is the model config's ``id2label`` mapping
        (empty dict if absent or on failure). Errors are reported in the
        UI via ``st.error`` instead of raising, so the app degrades
        gracefully.
    """
    # NOTE: the original source had its indentation stripped (the file was a
    # SyntaxError as pasted); structure reconstructed from the statement order.
    try:
        # Load model configuration to get label-to-MBTI mapping.
        config = AutoConfig.from_pretrained(model_path)
        label_to_mbti = config.id2label if hasattr(config, "id2label") else {}
        # Load the tokenizer and model from the same local directory.
        tokenizer = AutoTokenizer.from_pretrained(model_path)
        model = AutoModelForSequenceClassification.from_pretrained(model_path)
        pipe = pipeline("text-classification", model=model, tokenizer=tokenizer)
        return pipe, label_to_mbti
    except Exception as e:
        # Broad catch is deliberate: this is the app's top-level loading
        # boundary — surface the failure in the UI rather than crash.
        st.error(f"Error loading the model: {e}")
        return None, {}
# Load (or fetch from cache) the pipeline and label map at import time,
# so every rerun below can reuse them.
pipe, label_to_mbti = load_pipeline_and_mapping()
# Streamlit UI
st.title("MBTI Personality Prediction")
st.write("Enter text below to classify the MBTI personality type:")
# Input text box
user_input = st.text_area("Input Text", placeholder="Type something here...", height=200)
# Predict button
# Predict button: runs only on the rerun triggered by a click.
if st.button("Predict"):
    if not pipe:
        # Model failed to load earlier; load_pipeline_and_mapping returned None.
        st.error("The model failed to load. Please check the setup.")
    elif user_input.strip():
        # Generate predictions for the raw user text.
        predictions = pipe(user_input)
        st.subheader("Predictions:")
        for pred in predictions:
            # BUGFIX: `label_to_mbti` is config.id2label, which maps *integer
            # class ids* to label strings — but the pipeline has already
            # resolved `pred["label"]` through id2label, so keying the dict
            # with that string always missed and printed "Unknown".
            # Fall back to the pipeline's own label, which is the MBTI type.
            mbti_type = label_to_mbti.get(pred["label"], pred["label"])
            st.write(f"**MBTI Type:** {mbti_type}, **Confidence:** {pred['score']:.4f}")
    else:
        # Empty or whitespace-only input: prompt instead of predicting.
        st.warning("Please enter some text before clicking 'Predict'.")
|