umuthopeyildirim committed on
Commit
3fb6608
1 Parent(s): 76ad30d

Add image normalization and variable conversion in app.py

Files changed (1):
  app.py +10 -4
app.py CHANGED
@@ -6,7 +6,7 @@ from PIL import Image
 import spaces
 import torch
 import torch.nn.functional as F
-from torchvision.transforms import Compose
+from torchvision.transforms import Compose, Normalize
 import tempfile
 from gradio_imageslider import ImageSlider
 
@@ -74,9 +74,15 @@ with gr.Blocks(css=css) as demo:
 
         h, w = image.shape[:2]
 
-        image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0
-        image = transform({'image': image})['image']
-        image = torch.from_numpy(image).unsqueeze(0).to(DEVICE)
+        # image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB) / 255.0
+        # image = transform({'image': image})['image']
+        # image = torch.from_numpy(image).unsqueeze(0).to(DEVICE)
+
+        image = torch.from_numpy(image.transpose((2, 0, 1))).to(DEVICE)
+        image = torch.from_numpy(image.transpose((2, 0, 1)))
+        image = Normalize(mean=[0.485, 0.456, 0.406], std=[
+                          0.229, 0.224, 0.225])(image)
+        image = torch.autograd.Variable(image.unsqueeze(0).cuda())
 
         depth = predict_depth(model, image)
         depth = F.interpolate(depth[None], (h, w),
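
For context, below is a minimal, self-contained sketch of the preprocessing path this commit switches to. It is not the app's exact code: it collapses the diff's two torch.from_numpy lines into one (after the first, image is already a tensor, so only one conversion is needed), restores the / 255.0 scaling that the commented-out path applied (torchvision's ImageNet Normalize statistics assume float inputs in [0, 1]), and uses a plain tensor in place of torch.autograd.Variable, which has been a no-op wrapper since PyTorch 0.4. The input frame here is an assumed stand-in for what the Gradio app receives.

import numpy as np
import torch
from torchvision.transforms import Normalize

# Assumed stand-in for a decoded frame: HWC layout, uint8 in [0, 255].
image = np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8)
h, w = image.shape[:2]

# HWC -> CHW, then cast to float and scale to [0, 1]; the ImageNet
# mean/std below are defined for inputs in that range.
tensor = torch.from_numpy(image.transpose((2, 0, 1))).float() / 255.0
tensor = Normalize(mean=[0.485, 0.456, 0.406],
                   std=[0.229, 0.224, 0.225])(tensor)

# Add a batch dimension and move to the target device; a plain tensor
# replaces the commit's Variable(...).cuda() combination.
device = "cuda" if torch.cuda.is_available() else "cpu"
batch = tensor.unsqueeze(0).to(device)
print(batch.shape)  # torch.Size([1, 3, 480, 640])

The model would then consume the batch exactly as in the diff's context lines: depth = predict_depth(model, batch), with F.interpolate(depth[None], (h, w), ...) resizing the prediction back to the original resolution.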