File size: 62 Bytes
1f8beea
 
 
 
 
1
2
3
4
5
6
import gc

import torch

# Free GPU memory in two steps, in the order that actually works:
#
# 1. gc.collect() — force a Python garbage-collection pass so tensors whose
#    last references live in uncollected reference cycles are deallocated.
#    Until that happens, their CUDA blocks are still "in use" from the
#    allocator's point of view.
# 2. torch.cuda.empty_cache() — return the now-unused cached blocks from
#    PyTorch's caching allocator back to the CUDA driver (visible in
#    nvidia-smi). Calling it *before* collecting (the original order) misses
#    any memory still pinned by cycle-held tensors.
gc.collect()

# Guard keeps the script safe on CPU-only installs, where no CUDA device
# (and hence no CUDA cache) exists.
if torch.cuda.is_available():
    torch.cuda.empty_cache()