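"""Download and prepare the Cephalogram400 cephalometric X-ray dataset.

The script downloads the archive from figshare, converts the raw BMP
radiographs to single-slice NIfTI volumes, converts the landmark .txt
annotations to JSON, and renders overlay figures for visual inspection.
"""
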
import argparse
import glob
import json
import os
import shutil
import subprocess
from pathlib import Path

import cv2
import matplotlib.pyplot as plt
import nibabel as nib
import numpy as np
import rarfile


def convert_bmp_to_niigz(
    bmp_dir,
    niigz_dir,
    slice_dim_type,
    pseudo_voxel_size,
    flip_dim0=False,
    flip_dim1=False,
    swap_dim01=False,
):
    """
    Convert BMP image files to NIfTI (.nii.gz) format.

    Each 2D BMP image is converted to a single-slice 3D NIfTI volume with the
    requested slice orientation. The output NIfTI files use an RAS+ affine
    built from the given pseudo voxel size.

    Args:
        bmp_dir (str): Input directory containing BMP files to convert
        niigz_dir (str): Output directory where NIfTI files will be saved
        slice_dim_type (int): Slice dimension/orientation type:
            0: Sagittal (YZ plane)
            1: Coronal (XZ plane)
            2: Axial (XY plane)
        pseudo_voxel_size (list): Three floats giving the voxel size in mm [x, y, z]
        flip_dim0 (bool, optional): If True, flip the image along dimension 0. Defaults to False.
        flip_dim1 (bool, optional): If True, flip the image along dimension 1. Defaults to False.
        swap_dim01 (bool, optional): If True, swap dimensions 0 and 1. Defaults to False.

    Returns:
        tuple: Original (dim0, dim1) pixel dimensions of the last converted BMP
    """
    if slice_dim_type not in [0, 1, 2]:
        raise ValueError("slice_dim_type must be 0, 1, or 2")

    pseudo_voxel_size = list(pseudo_voxel_size)
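
    # Create the output directory and collect the input BMP files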
    Path(niigz_dir).mkdir(parents=True, exist_ok=True)

    bmp_files = list(Path(bmp_dir).glob("*.bmp"))
    print(f"Found {len(bmp_files)} .bmp files")

    for bmp_file in bmp_files:
        try:
            print(f"Converting {bmp_file.name}")

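            # Read the BMP as a grayscale image and record its original size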
            img_2d = cv2.imread(str(bmp_file), cv2.IMREAD_GRAYSCALE)
            img_size_dim0, img_size_dim1 = img_2d.shape

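            # Apply the optional flips/axis swap before building the volume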
            if flip_dim0:
                img_2d = cv2.flip(img_2d, 0)
            if flip_dim1:
                img_2d = cv2.flip(img_2d, 1)
            if swap_dim01:
                img_2d = np.swapaxes(img_2d, 0, 1)

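            # Place the 2D slice into a singleton 3D volume along the requested axis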
            if slice_dim_type == 0:
                img_3d = np.zeros(
                    (1, img_2d.shape[0], img_2d.shape[1]), dtype=img_2d.dtype
                )
                img_3d[0, :, :] = img_2d
            elif slice_dim_type == 1:
                img_3d = np.zeros(
                    (img_2d.shape[0], 1, img_2d.shape[1]), dtype=img_2d.dtype
                )
                img_3d[:, 0, :] = img_2d
            else:
                img_3d = np.zeros(
                    (img_2d.shape[0], img_2d.shape[1], 1), dtype=img_2d.dtype
                )
                img_3d[:, :, 0] = img_2d

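            # Build a diagonal affine that encodes the pseudo voxel size (RAS+)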
            affine = np.diag(pseudo_voxel_size + [1])

            nii_img = nib.Nifti1Image(img_3d, affine)
            nii_img.header.set_zooms(pseudo_voxel_size)

            output_file = Path(niigz_dir) / f"{bmp_file.stem}.nii.gz"
            nib.save(nii_img, str(output_file))
            print(f"Saved to {output_file}")

        except Exception as e:
            print(f"Error converting {bmp_file.name}: {e}")

    return img_size_dim0, img_size_dim1


def process_landmarks_data(
    landmarks_txt_dir: str,
    landmarks_json_dir: str,
    n: int,
    img_sizes,
    flip_dim0=False,
    flip_dim1=False,
    swap_dim01=False,
) -> None:
    """
    Read landmark points from every txt file in a directory and save them as JSON files.

    Args:
        landmarks_txt_dir (str): Directory containing the landmark txt files
        landmarks_json_dir (str): Directory where the JSON files will be saved
        n (int): Number of lines (landmarks) to read from each file
        img_sizes: Original (dim0, dim1) image size, used when flipping coordinates
        flip_dim0 (bool): Whether to flip coordinates along dimension 0
        flip_dim1 (bool): Whether to flip coordinates along dimension 1
        swap_dim01 (bool): Whether to swap dimensions 0 and 1
    """
    os.makedirs(landmarks_json_dir, exist_ok=True)

    for txt_file in glob.glob(os.path.join(landmarks_txt_dir, "*.txt")):
        landmarks = {}
        filename = os.path.basename(txt_file)
        json_path = os.path.join(landmarks_json_dir, filename.replace(".txt", ".json"))

        try:
            with open(txt_file, "r") as f:
                for i in range(n):
                    line = f.readline().strip()
                    if not line:
                        break

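                    # Each line holds two comma-separated pixel indices: dim1 first, then dim0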
                    idx_dim1, idx_dim0 = map(int, line.split(","))

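                    # Mirror the flips/swap applied to the images so the landmarks stay aligned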
                    if flip_dim0:
                        idx_dim0 = img_sizes[0] - idx_dim0
                    if flip_dim1:
                        idx_dim1 = img_sizes[1] - idx_dim1
                    if swap_dim01:
                        idx_dim0, idx_dim1 = idx_dim1, idx_dim0

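                    # Store the point as 0-based [slice, dim0, dim1] coordinates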
landmarks[f"P{i+1}"] = [ |
|
coord - 1 for coord in [1, idx_dim0, idx_dim1] |
|
] |
|
|
|
|
|
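            # All landmarks belong to the single sagittal (x) slice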
            json_dict = {
                "slice_landmarks_x": [
                    {
                        "slice_idx": 1,
                        "landmarks": landmarks,
                    },
                ],
                "slice_landmarks_y": [],
                "slice_landmarks_z": [],
            }

            with open(json_path, "w") as f:
                json.dump(json_dict, f, indent=4)

        except FileNotFoundError:
            print(f"Error: File {txt_file} not found")
        except ValueError:
            print(f"Error: Invalid format in file {txt_file}")
        except Exception as e:
            print(f"Error reading file {txt_file}: {str(e)}")


def plot_sagittal_slice_with_landmarks(
    nii_path: str, json_path: str, fig_path: str = None
):
    """Plot the first slice of a NIfTI file and overlay landmarks from a JSON file.

    Args:
        nii_path (str): Path to the .nii.gz file
        json_path (str): Path to the landmarks JSON file
        fig_path (str, optional): Path to save the plot. If None, the plot is displayed
    """
    nii_img = nib.load(nii_path)
    slice_data = nii_img.get_fdata()[0, :, :]

    with open(json_path, "r") as f:
        landmarks_json = json.load(f)

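    # Show the transposed slice so dim0 runs along x and dim1 along y (origin at lower left)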
    plt.figure(figsize=(12, 12))
    plt.imshow(slice_data.T, cmap="gray", origin="lower")

    coords_dim0 = []
    coords_dim1 = []
    landmarks = landmarks_json["slice_landmarks_x"][0]["landmarks"]
    for point_id, coords in landmarks.items():
        if len(coords) == 3:
            coords_dim0.append(coords[1])
            coords_dim1.append(coords[2])

    plt.scatter(
        coords_dim0,
        coords_dim1,
        facecolors="#18A727",
        edgecolors="black",
        marker="o",
        s=80,
        linewidth=1.5,
    )
    for i, (x, y) in enumerate(zip(coords_dim0, coords_dim1), 1):
        plt.annotate(
            f"$\\mathbf{{{i}}}$",
            (x, y),
            xytext=(2, 2),
            textcoords="offset points",
            color="#FE9100",
            fontsize=14,
        )

    plt.xlabel("Anterior →", fontsize=14)
    plt.ylabel("Superior →", fontsize=14)
    plt.margins(0)

    if fig_path:
        plt.savefig(fig_path, bbox_inches="tight", dpi=300)
        print(f"Plot saved to: {fig_path}")
    else:
        plt.show()
    plt.close()


def plot_sagittal_slice_with_landmarks_batch(
    image_dir: str, landmark_dir: str, fig_dir: str
):
    """Plot all cases from the given directories.

    Args:
        image_dir (str): Directory containing .nii.gz files
        landmark_dir (str): Directory containing landmark JSON files
        fig_dir (str): Directory to save output figures
    """
    os.makedirs(fig_dir, exist_ok=True)

    for nii_path in glob.glob(os.path.join(image_dir, "*.nii.gz")):
        # Strip the double extension (.nii.gz) to recover the case name
        base_name = os.path.splitext(os.path.splitext(os.path.basename(nii_path))[0])[0]
        json_path = os.path.join(landmark_dir, f"{base_name}.json")
        fig_path = os.path.join(fig_dir, f"{base_name}.png")

        if os.path.exists(json_path):
            plot_sagittal_slice_with_landmarks(nii_path, json_path, fig_path)
        else:
            print(f"Warning: No landmark file found for {base_name}")


def download_and_extract(dataset_dir, dataset_name):
    print(f"Downloading {dataset_name} dataset to {dataset_dir}...")

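    # Download the Cephalogram400 archive from figshare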
url = "https://figshare.com/ndownloader/articles/3471833?private_link=37ec464af8e81ae6ebbf" |
|
output_file = "Cephalogram400.zip" |
|
subprocess.run(["curl", url, "-o", output_file], check=True) |
|
|
|
|
|
print("Extracting ZIP file...") |
|
subprocess.run(["unzip", output_file], check=True) |
|
|
|
|
|
print("Extracting RAR files...") |
|
for file in os.listdir("."): |
|
if file.endswith(".rar"): |
|
with rarfile.RarFile(file) as rf: |
|
rf.extractall() |
|
|
|
|
|
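    # Gather all extracted BMP files into a single Images-raw/ directory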
os.makedirs("Images-raw", exist_ok=True) |
|
|
|
|
|
for src_path in glob.glob(f"RawImage/**/*.bmp", recursive=True): |
|
shutil.move(src_path, os.path.join("Images-raw", os.path.basename(src_path))) |
|
|
|
|
|
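    # Orientation flags; the same values are reused below for the landmark conversion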
    Flag_flip_dim0 = True
    Flag_flip_dim1 = False
    Flag_swap_dim01 = True
    img_size_dim0, img_size_dim1 = convert_bmp_to_niigz(
        "Images-raw",
        "Images",
        slice_dim_type=0,
        pseudo_voxel_size=[0.1, 0.1, 0.1],
        flip_dim0=Flag_flip_dim0,
        flip_dim1=Flag_flip_dim1,
        swap_dim01=Flag_swap_dim01,
    )

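    # Convert the 19 landmarks per case from the 400_senior annotations to JSON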
    process_landmarks_data(
        "400_senior",
        "Landmarks",
        19,
        img_sizes=[img_size_dim0, img_size_dim1],
        flip_dim0=Flag_flip_dim0,
        flip_dim1=Flag_flip_dim1,
        swap_dim01=Flag_swap_dim01,
    )

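    # Render overlay figures for visual quality control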
plot_sagittal_slice_with_landmarks_batch("Images", "Landmarks", "Landmarks-fig") |
|
|
|
|
|
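    # Remove intermediate directories and downloaded archives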
    for dir_name in [
        "RawImage",
        "400_junior",
        "400_senior",
        "Images-raw",
        "EvaluationCode",
    ]:
        shutil.rmtree(dir_name, ignore_errors=True)
    for file in os.listdir("."):
        if file.endswith((".rar", ".zip")):
            os.remove(file)

    print(f"Download and extraction completed for {dataset_name}")


if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="Download and extract dataset")
    parser.add_argument(
        "-d",
        "--dir_datasets_data",
        help="Directory path where datasets will be stored",
        required=True,
    )
    parser.add_argument(
        "-n",
        "--dataset_name",
        help="Name of the dataset",
        required=True,
    )
    args = parser.parse_args()

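    # Create the dataset directory and run everything from inside it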
    dataset_dir = os.path.join(args.dir_datasets_data, args.dataset_name)
    os.makedirs(dataset_dir, exist_ok=True)
    os.chdir(dataset_dir)

    download_and_extract(dataset_dir, args.dataset_name)