-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathunion_unet.py
More file actions
190 lines (166 loc) · 6.26 KB
/
union_unet.py
File metadata and controls
190 lines (166 loc) · 6.26 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
import numpy as np
import matplotlib.pyplot as plt
import nibabel as nib
import seaborn as sns
import os
import pandas as pd
from itertools import combinations
from os import listdir
from os.path import isfile, join
from monai.utils import first, set_determinism
from monai.transforms import (
Activations,
AsDiscrete,
AsDiscreted,
Compose,
EnsureChannelFirstd,
Invertd,
LoadImage,
LoadImaged,
NormalizeIntensityd,
RandCropByPosNegLabeld,
RandShiftIntensityd,
SaveImage,
SaveImaged,
ScaleIntensityd,
SpatialCropd,
)
from tqdm import tqdm
from monai.data import CacheDataset, DataLoader, Dataset, decollate_batch
from monai.inferers import sliding_window_inference
from monai.handlers.utils import from_engine
from monai.networks.nets import UNet #UNet, SegResNet, AttentionUnet, UNETR, SwinUNETR,
from monai.networks.layers import Norm
from monai.metrics import DiceMetric
from monai.losses import DiceLoss, DiceCELoss, DiceFocalLoss
from monai.config import print_config, print_gpu_info
from functools import partial
import monai
import torch
import glob
from datetime import datetime
import json
import random
import shutil
import tempfile
import argparse
# Parse the command line: the script needs an input folder of .nii.gz
# volumes and a destination folder for the ensembled predictions.
cli = argparse.ArgumentParser()
cli.add_argument('-i', '--input-dir', required=True,
                 help='path to your input directory containing images (nii.gz)')
cli.add_argument('-o', '--output-dir', required=True,
                 help='path to your output directory')
args = cli.parse_args()
def average_binary_masks(folders, output_folder):
    """Majority-vote ensemble of binary segmentation masks.

    For every mask file listed in the first folder, loads the mask with the
    same filename from each folder in ``folders``, averages them voxel-wise,
    and thresholds the average at 0.5 (a voxel becomes foreground when at
    least half of the models predicted it). Each resulting binary mask is
    written to ``output_folder`` under its original filename.

    Parameters
    ----------
    folders : sequence of str
        Directories holding the per-model predicted masks. All folders are
        assumed to contain files with identical names -- the listing is
        taken from ``folders[0]`` only (TODO confirm this invariant).
    output_folder : str
        Destination directory; created if it does not exist.

    Raises
    ------
    ValueError
        If ``folders`` is empty (the average would be undefined).
    """
    if not folders:
        raise ValueError("average_binary_masks() needs at least one input folder")
    # Ensure the output folder exists.
    os.makedirs(output_folder, exist_ok=True)
    # Only the raw single-channel predictions; skip the two-channel copies
    # ("*_2ch.nii.gz") that the inference step also writes.
    common_filenames = [
        f for f in os.listdir(folders[0])
        if f.endswith('.nii.gz') and not f.endswith('_2ch.nii.gz')
    ]
    for filename in common_filenames:
        mask_sum = None
        # Accumulate this mask across all model folders.
        for folder in folders:
            mask_nii = nib.load(os.path.join(folder, filename))
            mask_data = mask_nii.get_fdata()
            if mask_sum is None:
                mask_sum = np.zeros_like(mask_data)
            mask_sum += mask_data
        # Majority vote: average >= 0.5 means at least half voted foreground.
        avg_mask_binary = (mask_sum / len(folders) >= 0.5).astype(np.uint8)
        # Reuse the affine/header of the last loaded mask so the output stays
        # aligned with the inputs (assumes all inputs share the same geometry
        # -- TODO confirm).
        avg_mask_nii = nib.Nifti1Image(avg_mask_binary, mask_nii.affine, mask_nii.header)
        nib.save(avg_mask_nii, os.path.join(output_folder, filename))
# DATA FOLDER PATH
container_dir = '/ge_seg/scripts/'  # fixed path inside the container image -- TODO confirm
input_dir = args.input_dir    # directory holding the input .nii.gz volumes
output_dir = args.output_dir  # directory for the final ensembled masks
data_dir = os.path.join(container_dir, 'test')  # per-fold predictions are written under here
## Inference
# One dict per test volume; MONAI dictionary transforms operate on the "image" key.
test_images = sorted(glob.glob(os.path.join(input_dir,"*.nii.gz")))
data_dictsTe = [{"image": image} for image in test_images]
# Set up GPU usage
print("\n#### GPU INFORMATION ###")
print_gpu_info()
device = torch.device("cuda:0" if (torch.cuda.is_available()) else "cpu")
print('Device:', device)
# Additional info when using CUDA
if device.type == 'cuda':
    print(torch.cuda.get_device_name(0))
    torch.cuda.empty_cache()  # release any cached allocations before inference
# Network initialization: 3D U-Net with 1 input channel (the image) and
# 2 output channels (background / foreground logits).
model = UNet(
    spatial_dims=3,
    in_channels=1,
    out_channels=2,
    channels=(64, 128, 256, 512, 1024),  # feature maps per encoder level
    strides=(2, 2, 2, 2),                # downsampling factor between levels
    num_res_units=2,
    norm=Norm.BATCH,
).to(device)
# Define loss function and optimizer.
# NOTE(review): loss_function, optimizer and dice_metric are training-time
# objects; this script only runs inference, so they appear unused below --
# confirm before removing.
loss_function = DiceCELoss(to_onehot_y=True, softmax=True)
optimizer = torch.optim.AdamW(model.parameters(), 1e-4, weight_decay=1e-5)
dice_metric = DiceMetric(include_background=False, reduction="mean")
# % Training network
net_name="unet_f"         # prefix of the fold weight files and per-fold output folders
roi_size = (160,160,160)  # sliding-window patch size used at inference time
# Define MONAI processing transforms for test data.
test_org_transforms = Compose(
    [
        LoadImaged(keys="image"),
        EnsureChannelFirstd(keys="image"),
        # Z-score normalization (over non-zero voxels) helps highlight
        # low-intensity structures.
        NormalizeIntensityd(keys=["image"], subtrahend=None, divisor=None, nonzero=True),
        ScaleIntensityd(keys="image", minv=0, maxv=1)  # then rescale to [0, 1]
    ]
)
print("*** Loading data for test...")
# Plain (non-cached) dataset; one volume per batch.
test_org_ds = Dataset(data=data_dictsTe, transform=test_org_transforms)
test_org_loader = DataLoader(test_org_ds, batch_size=1, num_workers=2)
# Run inference with each of the 5 cross-validation folds and save the
# per-fold predictions into separate sub-folders of data_dir.
for kk in range(1, 6):
    # Per-fold output folder, e.g. <data_dir>/unet_f1 ... unet_f5.
    directory = os.path.join(data_dir, net_name+str(kk))
    if not os.path.exists(directory):
        os.makedirs(directory)
    # Post-processing: invert the test-time transforms on the prediction,
    # argmax over the 2 channels + one-hot, and save a "_2ch" copy.
    post_transforms = Compose(
        [
            Invertd(
                keys="pred",
                transform=test_org_transforms,
                orig_keys="image",
                meta_keys="pred_meta_dict",
                orig_meta_keys="image_meta_dict",
                meta_key_postfix="meta_dict",
                nearest_interp=False,
                to_tensor=True,
            ),
            AsDiscreted(keys="pred", argmax=True, to_onehot=2),
            SaveImaged(keys="pred", meta_keys="pred_meta_dict", output_dir=directory, output_postfix="2ch", resample=False, separate_folder=False)
        ]
    )
    # Writer for the plain argmax mask (no filename postfix).
    save_image = SaveImage(output_dir=directory,output_postfix="", resample=False, separate_folder=False)
    loader = LoadImage()  # NOTE(review): created but never used below -- confirm before removing
    # Load this fold's trained weights.
    model.load_state_dict(torch.load(os.path.join(container_dir, "model_weight", net_name+str(kk)+".pth")))
    model.eval()
    with torch.no_grad():
        for test_data in test_org_loader:
            test_inputs = test_data["image"].to(device)
            sw_batch_size = 4
            # Sliding-window inference over roi_size patches so large
            # volumes fit in GPU memory.
            test_data["pred"] = sliding_window_inference(test_inputs, roi_size, sw_batch_size, model)
            # Collapse the channel dimension to a label map; batch size is 1,
            # so index 0 selects the single volume.
            mask_pred = torch.argmax(test_data["pred"],dim=1)
            save_image(mask_pred[0,:,:,:])
            # Invert transforms and save the one-hot ("_2ch") prediction too.
            test_data = [post_transforms(i) for i in decollate_batch(test_data)]
# GE ENSEMBLE: majority-vote the 5 per-fold predictions into the final masks.
# The inference loop above wrote them to <data_dir>/unet_f1 ... unet_f5.
directory_single_pred = os.path.join(data_dir, net_name)
folders = [f"{directory_single_pred}{i}" for i in range(1, 6)]
# os.path.join() with a single argument was a no-op; use the directory as-is.
output_folder = output_dir
average_binary_masks(folders, output_folder)