Spaces:
Runtime error
Runtime error
Add application file
Browse files
app.py
ADDED
|
@@ -0,0 +1,67 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
+
import tensorflow as tf
|
| 3 |
+
import numpy as np
|
| 4 |
+
import json
|
| 5 |
+
|
| 6 |
+
class Contrastive_Loss_2(tf.keras.losses.Loss):
    """Contrastive loss combining a Euclidean-distance term with an NT-Xent term.

    total_loss = (1 - rate) * mean_pairwise_distance + rate * mean_NT_Xent

    Args:
        temperature: softmax temperature applied to the cosine similarities.
        rate: mixing weight in [0, 1]; 0 = pure distance term, 1 = pure NT-Xent.
        name: Keras loss name.

    Call signature: ``call(z1, z2)`` where z1/z2 are the two embedding views,
    each of shape (batch, embedding_dim).
    """

    def __init__(self, temperature=0.5, rate=0.5, name='Contrastive_Loss_2', **kwargs):
        super(Contrastive_Loss_2, self).__init__(name=name, **kwargs)
        self.temperature = temperature
        self.rate = rate
        # reduction=NONE keeps one similarity value per row pair instead of
        # collapsing to a scalar mean — we need the per-pair values below.
        self.cosine_sim = tf.keras.losses.CosineSimilarity(axis=-1, reduction=tf.keras.losses.Reduction.NONE)

    def call(self, z1, z2):
        # FIX: use the dynamic shape. `z1.shape` yields batch_size=None when
        # the batch dimension is unknown (graph mode), which then breaks
        # tf.eye(2*batch_size); tf.shape works in both eager and graph mode.
        batch_size = tf.shape(z1)[0]

        # --- Euclidean distance term ---
        difference = z1 - z2                                       # (B, E)
        squared_norm = tf.reduce_sum(tf.square(difference), axis=1)  # (B,)
        # epsilon inside sqrt avoids a NaN gradient at exactly zero distance
        distance = tf.sqrt(squared_norm + 1e-8)                    # (B,)
        mean_distance = tf.reduce_mean(distance)                   # scalar
        # FIX: pass the tensor itself, not `.numpy()` — calling .numpy() fails
        # under tf.function/graph execution and check_numerics expects a tensor.
        tf.debugging.check_numerics(mean_distance, 'Distance contains NaN values.')

        # --- NT-Xent (cosine similarity) term ---
        z = tf.concat((z1, z2), 0)                                 # (2B, E)

        # Keras CosineSimilarity is a *loss* (negated similarity), so negate
        # it back to get the actual similarity.
        sim_ij = - self.cosine_sim(z[:batch_size], z[batch_size:])  # (B,) positive pairs
        sim_ji = - self.cosine_sim(z[batch_size:], z[:batch_size])  # (B,) symmetric pairs
        sim_pos = tf.concat((sim_ij, sim_ji), axis=0)               # (2B,)
        numerator = tf.math.exp(sim_pos / self.temperature)         # (2B,)

        # Pairwise similarity of every row against every row via broadcasting.
        sim_neg = - self.cosine_sim(tf.expand_dims(z, 1), z)        # (2B, 2B)
        # Zero out the diagonal so a sample is never its own negative.
        mask = 1 - tf.eye(2 * batch_size, dtype=tf.float32)         # (2B, 2B)
        sim_neg = mask * tf.math.exp(sim_neg / self.temperature)    # (2B, 2B)
        denominator = tf.math.reduce_sum(sim_neg, axis=-1)          # (2B,)

        # Small epsilons keep the log finite if a row underflows to zero.
        mean_cosine_similarity = tf.reduce_mean(- tf.math.log((numerator + 1e-11) / (denominator + 1e-11)))
        tf.debugging.check_numerics(mean_cosine_similarity, 'Cosine contains NaN values.')

        # --- Weighted combination of the two terms ---
        total_loss = (1 - self.rate) * mean_distance + self.rate * mean_cosine_similarity
        tf.debugging.check_numerics(total_loss, 'Total contains NaN values.')
        return total_loss
|
| 46 |
+
|
| 47 |
+
# Restore the trained encoder. The custom loss class must be registered via
# custom_objects so Keras can deserialize it from the HDF5 file.
model = tf.keras.models.load_model(
    filepath='contrastive_model.h5',
    custom_objects={'Contrastive_Loss_2': Contrastive_Loss_2},
)

# Index -> human-readable scene name, in the order the model emits scores.
with open("scene_labels.json") as labels_file:
    labels = json.load(labels_file)
|
| 51 |
+
|
| 52 |
+
def classify_image(img):
    """Classify one image and return a {scene label: probability} dict for Gradio.

    Args:
        img: the raw H x W x C numpy array delivered by the Gradio Image input.

    Returns:
        Mapping of every label in `labels` to a float score.
    """
    # FIX: Keras models expect a batch axis; the Gradio input is a single
    # HxWxC array, and the old `pred, idx, probs = model.predict(img)` unpack
    # followed by `float(probs[i])` cannot succeed on what predict() returns
    # for a single-output model (a (1, n_classes) array) — likely the source
    # of this Space's "Runtime error".
    batch = np.expand_dims(img, axis=0)
    probs = model.predict(batch)[0]
    # NOTE(review): no input preprocessing is applied here; the commented-out
    # variant below used mobilenet.preprocess_input — confirm what the model
    # was trained on. Also confirm the model has a single softmax output.
    return {labels[i]: float(probs[i]) for i in range(len(labels))}
|
| 55 |
+
|
| 56 |
+
# def classify_image(img):
|
| 57 |
+
# arr = np.expand_dims(img, axis=0)
|
| 58 |
+
# arr = tf.keras.applications.mobilenet.preprocess_input(arr)
|
| 59 |
+
# prediction = model.predict(arr).flatten()
|
| 60 |
+
# return {labels[i]: float(prediction[i]) for i in range(45)}
|
| 61 |
+
|
| 62 |
+
# --- Gradio UI wiring ---
# NOTE(review): gr.inputs / gr.outputs are the legacy Gradio API (deprecated
# in 3.x, removed in 4.x, replaced by gr.Image / gr.Label). If this Space
# resolves a recent gradio version, these two lines raise AttributeError at
# startup — a plausible cause of the "Runtime error"; verify the pinned
# gradio version in requirements.txt.
image = gr.inputs.Image(shape=(256, 256))
label = gr.outputs.Label()
# Example images offered in the UI; these files must exist in the repo root.
examples = ['airplane_002.jpg','airplane_003.jpg','airport_020.jpg','airport_075.jpg','bridge_679.jpg','cloud_227.jpg','freeway_159.jpg','forest_235.jpg']

intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)
# inline=False: serve the app in its own page rather than embedding inline
# (relevant when launched from a notebook).
intf.launch(inline=False)
|