import gradio as gr
import numpy as np
import tensorflow as tf
from huggingface_hub import from_pretrained_keras
def loss(margin=1):
    """Provides 'contrastive_loss' an enclosing scope with variable 'margin'.

    Arguments:
        margin: Integer, defines the baseline for distance for which pairs
                should be classified as dissimilar. - (default is 1).

    Returns:
        'contrastive_loss' function with data ('margin') attached.
    """

    # Contrastive loss = mean( (1-true_value) * square(prediction) +
    #                          true_value * square( max(margin-prediction, 0) ))
    def contrastive_loss(y_true, y_pred):
        """Calculates the contrastive loss.

        Arguments:
            y_true: List of labels, each label is of type float32.
            y_pred: List of predictions of same length as of y_true,
                    each label is of type float32.

        Returns:
            A tensor containing contrastive loss as floating point value.
        """
        square_pred = tf.math.square(y_pred)
        margin_square = tf.math.square(tf.math.maximum(margin - (y_pred), 0))
        return tf.math.reduce_mean(
            (1 - y_true) * square_pred + (y_true) * margin_square
        )

    return contrastive_loss
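
# Illustrative arithmetic (a sketch, not used by the app): with margin=1, a pair
# labelled y_true=0 with predicted distance y_pred=0.2 contributes
# (1 - 0) * 0.2**2 = 0.04 to the loss, while the same distance for a pair
# labelled y_true=1 contributes 1 * max(1 - 0.2, 0)**2 = 0.64, so the two cases
# are pushed in opposite directions.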
# Load the pre-trained model; 'contrastive_loss' is registered as a custom object
# so Keras can deserialize the compiled loss (margin=1 assumed, matching the default above).
siamese = from_pretrained_keras(
    "keras-io/siamese-contrastive", custom_objects={"contrastive_loss": loss(margin=1)}
)
def predict_image(img1, img2):
    # The sketchpad inputs arrive as 28x28 grayscale arrays; the model expects a batch dimension.
    assert img1.shape == (28, 28)
    assert img1.shape == img2.shape
    print('img 1 shape', img1.shape)
    img1 = np.expand_dims(img1, 0)
    img2 = np.expand_dims(img2, 0)
    lab = str(siamese.predict([img1, img2])[0][0])
    return lab
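
# Illustrative usage (a sketch, assuming two 28x28 numpy arrays such as MNIST digits;
# the arrays below are hypothetical placeholders):
#   a, b = np.zeros((28, 28)), np.ones((28, 28))
#   print(predict_image(a, b))  # prints the network's predicted score as a string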
title = "Image similarity estimation using a Siamese Network with a contrastive loss"
description = "This space implements a Siamese network that compares handwritten digits in the style of the MNIST dataset. To use it, simply draw two digits in the input boxes."
article = """<p style='text-align: center'>
<a href='https://keras.io/examples/vision/siamese_contrastive/' target='_blank'>Keras Example given by Mehdi</a>
<br>
Space by @rushic24
</p>
"""
iface = gr.Interface(
    predict_image,
    inputs=["sketchpad", "sketchpad"],
    outputs="label",
    title=title,
    description=description,
    article=article,
)
iface.launch(debug=True)