katielink committed
Commit 65a733b
Parent: 14167ad

Update BraTS space

Files changed (4)
  1. README.md +2 -2
  2. app.py +83 -3
  3. examples/BRATS_485.nii.gz +3 -0
  4. requirements.txt +3 -0
README.md CHANGED
@@ -1,6 +1,6 @@
 ---
-title: Brats Segmentation
-emoji: 🏢
+title: Glioma Segmentation
+emoji: 🧠
 colorFrom: indigo
 colorTo: red
 sdk: gradio
app.py CHANGED
@@ -1,7 +1,87 @@
+import os
 import gradio as gr
+import torch
+from monai import bundle
+from monai.transforms import (
+    Compose,
+    LoadImaged,
+    EnsureChannelFirstd,
+    Orientationd,
+    NormalizeIntensityd,
+    Activationsd,
+    AsDiscreted,
+    ScaleIntensityd,
+)
 
-def greet(name):
-    return "Hello " + name + "!!"
+BUNDLE_NAME = 'spleen_ct_segmentation_v0.1.0'
+BUNDLE_PATH = os.path.join(torch.hub.get_dir(), 'bundle', BUNDLE_NAME)
+
+examples = ['examples/BRATS_485.nii.gz']
+
+model, _, _ = bundle.load(
+    name = BUNDLE_NAME,
+    source = 'hf_hub',
+    repo = 'katielink/brats_mri_segmentation_v0.1.0',
+    load_ts_module=True,
+)
+
+device = "cuda:0" if torch.cuda.is_available() else "cpu"
+
+parser = bundle.load_bundle_config(BUNDLE_PATH, 'inference.json')
+
+preproc_transforms = Compose(
+    [
+        LoadImaged(keys=["image"]),
+        EnsureChannelFirstd(keys="image"),
+        Orientationd(keys=["image"], axcodes="RAS"),
+        NormalizeIntensityd(keys="image", nonzero=True, channel_wise=True),
+    ]
+)
+inferer = parser.get_parsed_content('inferer', lazy=True, eval_expr=True, instantiate=True)
+post_transforms = Compose(
+    [
+        Activationsd(keys='pred', sigmoid=True),
+        AsDiscreted(keys='pred', threshold=0.5),
+        ScaleIntensityd(keys='image', minv=0., maxv=1.)
+    ]
+)
+
+def predict(input_file, z_axis, model=model, device=device):
+    data = {'image': [input_file.name]}
+    data = preproc_transforms(data)
+
+    model.to(device)
+    model.eval()
+    with torch.no_grad():
+        inputs = data['image'].to(device)
+        data['pred'] = inferer(inputs=inputs[None,...], network=model)
+    data = post_transforms(data)
+
+    input_image = data['image'].numpy()
+    pred_image = data['pred'].cpu().detach().numpy()
+
+    input_t1_image = input_image[0, :, :, z_axis]
+    input_t1c_image = input_image[1, :, :, z_axis]
+    input_t2_image = input_image[2, :, :, z_axis]
+    input_flair_image = input_image[3, :, :, z_axis]
+
+    pred_1_image = pred_image[0, 0, :, :, z_axis]
+    pred_2_image = pred_image[0, 1, :, :, z_axis]
+    pred_3_image = pred_image[0, 2, :, :, z_axis]
+
+    return input_t1c_image, pred_1_image
+
+iface = gr.Interface(
+    fn=predict,
+    inputs=[
+        gr.File(label='Nifti file'),
+        gr.Slider(0, 200, label='z-axis', value=100)
+    ],
+    outputs=[
+        gr.Image(label='input image'),
+        gr.Image(label='segmentation')],
+    title='Segment Gliomas using MONAI',
+    examples=examples,
+)
 
-iface = gr.Interface(fn=greet, inputs="text", outputs="text")
 iface.launch()
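
A quick way to sanity-check the new predict() path outside the Gradio UI is sketched below. It is not part of this commit and makes a few assumptions: app.py is importable from the repo root, the trailing iface.launch() in app.py is commented out first (otherwise importing the module starts the Gradio server), and examples/BRATS_485.nii.gz has been pulled via Git LFS. The _FakeUpload class is a hypothetical stand-in for the object gr.File passes to predict, which only needs a .name attribute pointing at the file on disk.

# Sketch of a local smoke test for predict(); not part of this commit.
# Assumes app.py is importable with iface.launch() commented out, and that
# examples/BRATS_485.nii.gz has been fetched via Git LFS.
from app import predict

class _FakeUpload:
    # Hypothetical stand-in for the gr.File upload object: predict() only
    # reads the .name attribute to locate the NIfTI file on disk.
    def __init__(self, path):
        self.name = path

t1c_slice, tumor_mask = predict(_FakeUpload('examples/BRATS_485.nii.gz'), z_axis=100)
print(t1c_slice.shape, tumor_mask.shape)  # two 2D arrays: T1c slice and predicted mask at z=100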
examples/BRATS_485.nii.gz ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1de6be1eeb49c788baa286a21d71546b2974bc300d5bc6ce4541e41854a0fefb
+size 8327084
requirements.txt ADDED
@@ -0,0 +1,3 @@
+git+https://github.com/katielink/MONAI.git@4042-download-hf-hub-bundle
+huggingface_hub
+nibabel