import shutil
import torch
import gc


def is_disk_full(min_free_space_in_GB=10):
    """Check whether the root filesystem is low on free space.

    Args:
        min_free_space_in_GB (int | float): Minimum free space required, in
            gibibytes (1024**3 bytes).

    Returns:
        bool: True if free space on "/" is below the threshold (disk is
        considered "full"), False otherwise.
    """
    total, used, free = shutil.disk_usage("/")
    free_gb = free / (1024 ** 3)
    if free_gb >= min_free_space_in_GB:
        print(f'enough space available ({free_gb} GB)')
        return False
    else:
        print('clean up!')
        return True


def free_gpu_memory(obj=None, label='Object'):
    """
    Frees GPU memory by deleting the passed object, collecting garbage,
    and clearing PyTorch's CUDA cache.

    Note:
        ``del obj`` only removes this function's local reference. The
        underlying object is reclaimed only if the caller also drops
        their own reference (e.g. ``free_gpu_memory(model); del model``
        or ``model = None`` at the call site).

    Args:
        obj (optional): The model or pipeline to delete.
        label (str): Label for logging (e.g., 'model', 'pipeline').
    """
    if torch.cuda.is_available():
        print(f"\n[{label}] Before deletion: {torch.cuda.memory_allocated() / 1e6:.2f} MB")

    if obj is not None:
        # Drops only the local reference; see the Note in the docstring.
        del obj

    gc.collect()
    # Safe no-op when CUDA is unavailable/uninitialized.
    torch.cuda.empty_cache()

    if torch.cuda.is_available():
        print(f"[{label}] After deletion: {torch.cuda.memory_allocated() / 1e6:.2f} MB\n")