Spaces:
Runtime error
Runtime error
File size: 2,482 Bytes
4a07c43 f3695f1 65798e0 f3695f1 24f9235 f3695f1 ae96dc5 f3695f1 d4e4b48 e73fe2f f3695f1 65798e0 93326ce 65798e0 f3695f1 93326ce f3695f1 e73fe2f f3695f1 e73fe2f f3695f1 0fa9601 e73fe2f f3695f1 5b908f1 e73fe2f f3695f1 5b908f1 f3695f1 e73fe2f 5ffc083 f3695f1 |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 |
'''NEURAL STYLE TRANSFER '''
import gradio as gr
import tensorflow as tf
import tensorflow_hub as hub
import PIL
from PIL import Image
import numpy as np
# import time
# import requests
#import cv2
# !mkdir nstmodel
# !wget -c https://storage.googleapis.com/tfhub-modules/google/magenta/arbitrary-image-stylization-v1-256/2.tar.gz -O - | tar -xz -C /nstmodel
# import tensorflow.keras
# from PIL import Image, ImageOps
#import requests
#import tarfile
#MODEL_PATH='Nst_model'
# Disable scientific notation so printed pixel arrays are easy to read.
np.set_printoptions(suppress=True)
# Load the Magenta arbitrary-image-stylization model from TF-Hub.
# NOTE: this runs at import time and fetches the model over the network
# (cached after the first download), so startup can be slow.
model = hub.load('https://tfhub.dev/google/magenta/arbitrary-image-stylization-v1-256/2')
# Alternative: load a locally saved Keras model instead of the Hub one.
#model = tf.keras.models.load_model(MODEL_PATH)
def tensor_to_image(tensor):
    """Convert a float tensor with values in [0, 1] to a PIL image.

    Scales values to 0-255, casts to uint8, and strips a leading
    batch dimension of size 1 if the tensor has more than 3 dims.
    """
    pixels = np.array(tensor * 255, dtype=np.uint8)
    if pixels.ndim > 3:
        # Only single-image batches are supported here.
        assert pixels.shape[0] == 1
        pixels = pixels[0]
    return PIL.Image.fromarray(pixels)
"""## Grayscaling image for testing purpose to check if we could get better results.
def gray_scaled(inp_img):
gray = cv2.cvtColor(inp_img, cv2.COLOR_BGR2GRAY)
gray_img = np.zeros_like(inp_img)
gray_img[:,:,0] = gray
gray_img[:,:,1] = gray
gray_img[:,:,2] = gray
return gray_img
"""
##Transformation
def transform_my_model(content_image, style_image):
    """Stylize `content_image` with the artistic look of `style_image`.

    Both inputs are H x W x C numpy arrays (uint8 pixels, as supplied by
    Gradio — TODO confirm against the UI wiring). Returns the stylized
    result as a PIL image.
    """
    # Add a batch dimension and normalize pixel values into [0, 1].
    content = content_image.astype(np.float32)[np.newaxis, ...] / 255.
    style = style_image.astype(np.float32)[np.newaxis, ...] / 255.
    # Run the TF-Hub stylization model; its first output is the
    # stylized image batch.
    outputs = model(tf.constant(content), tf.constant(style))
    return tensor_to_image(outputs[0])
# Gradio UI wiring: two image inputs (content + style), one image output.
# FIX: `gr.inputs.Image` / `gr.outputs.Image` were deprecated in Gradio 3.x
# and removed in 4.x (they raise AttributeError at startup — the likely
# cause of this Space's "Runtime error"); use the top-level components.
image1 = gr.Image(label="Content Image")  # CONTENT IMAGE
image2 = gr.Image(label="Style Image")    # STYLE IMAGE
stylizedimg = gr.Image(label="Result")
gr.Interface(
    fn=transform_my_model,
    inputs=[image1, image2],
    outputs=stylizedimg,
    title='Style Transfer',
    # Themes are referenced by hub name ("user/theme") in modern Gradio.
    theme='gradio/seafoam',
    examples=[['Content_Images/contnt12.jpg', 'VG516.jpg']],
    article="References-\n\nExploring the structure of a real-time, arbitrary neural artistic stylization network. Golnaz Ghiasi, Honglak Lee, Manjunath Kudlur, Vincent Dumoulin.",
).launch(debug=True)
|