# app.py — Gradio + TFLite sentiment-analysis demo (commit 22956ea)
import gradio as gr
import tensorflow as tf
import numpy as np
# 1. Load the TFLite model (make sure the filename matches the file you uploaded).
# If your model file has a different name, change the path below.
interpreter = None  # stays None if loading fails, so predict_sentiment can report it
try:
    interpreter = tf.lite.Interpreter(model_path="tiny_sentiment_model_imdb.tflite")
    interpreter.allocate_tensors()
except Exception as e:
    # Best-effort startup: log the failure but keep the app alive so the UI still loads.
    print(f"Error loading model: {e}")
def predict_sentiment(text):
    """Classify *text* as "Positive" or "Negative" with the global TFLite interpreter.

    Args:
        text: Raw input sentence from the Gradio textbox.

    Returns:
        "Positive" or "Negative", or an error string prefixed
        "Error saat prediksi:" — the function never raises, so the UI
        always gets something displayable.

    NOTE(review): tokenization is still missing — the input tensor is never
    set before invoke(), so the model currently runs on whatever data is
    already in its input buffer. A tokenizer must be added here before the
    predictions are meaningful.
    """
    try:
        input_details = interpreter.get_input_details()
        output_details = interpreter.get_output_details()
        # Placeholder: convert `text` into the model's input tensor here, e.g.:
        # interpreter.set_tensor(input_details[0]['index'], input_data)
        interpreter.invoke()
        output_data = interpreter.get_tensor(output_details[0]['index'])
        # Example output handling (adjust to your model's actual output shape).
        prediction = output_data[0][0]
        return "Positive" if prediction > 0.5 else "Negative"
    except Exception as e:
        # Catch-all boundary: surface any model/tokenizer failure in the UI
        # instead of crashing the request.
        return f"Error saat prediksi: {str(e)}"
# 2. Gradio UI (without the removed 'allow_flagging' argument, which now errors).
demo = gr.Interface(
    fn=predict_sentiment,
    inputs=gr.Textbox(label="Masukkan Kalimat", placeholder="Ketik di sini..."),
    outputs=gr.Textbox(label="Hasil Analisis"),
    title="Sentimen Analisis TFLite",
    flagging_mode="never",  # replacement for the deprecated allow_flagging="never"
)
# Launch the app only when run as a script (not when imported as a module).
if __name__ == "__main__":
    demo.launch()