MasterShomya committed on
Commit
838fbb0
·
verified ·
1 Parent(s): 2c0702b

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +26 -16
app.py CHANGED
@@ -1,19 +1,29 @@
1
- from tensorflow.keras.models import model_from_json
2
  import tensorflow as tf
 
 
3
  import joblib
4
  from tensorflow.keras.preprocessing.sequence import pad_sequences
5
- import gradio as gr
6
 
7
- # Load model architecture
8
- with open("model_architecture.json", "r") as f:
9
- model = model_from_json(f.read())
 
 
 
 
10
 
11
- # Load weights
12
- model.load_weights("model_weights.weights.h5")
 
 
 
 
13
 
14
- # Load tokenizer
 
15
  tokenizer = joblib.load("tokenizer.joblib")
16
 
 
17
  max_len = 40
18
 
19
  def predict_sentiment(text):
@@ -21,14 +31,14 @@ def predict_sentiment(text):
21
  padded = pad_sequences(seq, maxlen=max_len, padding='post')
22
  pred = model.predict(padded)[0][0]
23
  label = "Positive" if pred >= 0.5 else "Negative"
24
- return {label: float(pred) if label == "Positive" else 1 - float(pred)}
 
25
 
26
- demo = gr.Interface(
27
- fn=predict_sentiment,
28
- inputs=gr.Textbox(lines=2, placeholder="Enter a tweet..."),
29
- outputs=gr.Label(num_top_classes=2),
30
- title="Sentiment Analysis on Tweets",
31
- description="Enter a tweet and get predicted sentiment (Positive/Negative) and confidence score."
32
- )
33
 
34
  demo.launch()
 
 
1
  import tensorflow as tf
2
+ from tensorflow.keras.layers import Layer, Dense
3
+ import gradio as gr
4
  import joblib
5
  from tensorflow.keras.preprocessing.sequence import pad_sequences
 
6
 
7
+ # 🔸 Define Custom Layer Again
8
+ class BetterAttention(Layer):
9
+ def __init__(self, units=64, return_attention=False, **kwargs):
10
+ super(BetterAttention, self).__init__(**kwargs)
11
+ self.return_attention = return_attention
12
+ self.W = Dense(units)
13
+ self.V = Dense(1)
14
 
15
+ def call(self, inputs):
16
+ score = self.V(tf.nn.tanh(self.W(inputs)))
17
+ attention_weights = tf.nn.softmax(score, axis=1)
18
+ context_vector = attention_weights * inputs
19
+ context_vector = tf.reduce_sum(context_vector, axis=1)
20
+ return (context_vector, attention_weights) if self.return_attention else context_vector
21
 
22
+ # 🔸 Load model & tokenizer
23
+ model = tf.keras.models.load_model("model.keras", custom_objects={"BetterAttention": BetterAttention})
24
  tokenizer = joblib.load("tokenizer.joblib")
25
 
26
+ # 🔸 Define prediction
27
  max_len = 40
28
 
29
  def predict_sentiment(text):
 
31
  padded = pad_sequences(seq, maxlen=max_len, padding='post')
32
  pred = model.predict(padded)[0][0]
33
  label = "Positive" if pred >= 0.5 else "Negative"
34
+ confidence = float(pred if pred >= 0.5 else 1 - pred)
35
+ return {label: confidence}
36
 
37
+ # 🔸 Gradio Interface
38
+ demo = gr.Interface(fn=predict_sentiment,
39
+ inputs=gr.Textbox(lines=2, placeholder="Enter a tweet..."),
40
+ outputs=gr.Label(num_top_classes=2),
41
+ title="Sentiment Analysis on Tweets",
42
+ description="Enter a tweet and get predicted sentiment with confidence score.")
 
43
 
44
  demo.launch()