From 26ca7c2c03af9a8246868260e06556a547314bd2 Mon Sep 17 00:00:00 2001 From: GaoLeiA Date: Wed, 2 Jul 2025 16:38:56 +0800 Subject: [PATCH] fix: use torch.no_grad() during inference to prevent excessive memory usage (~30GB) (#349) --- scripts/realtime_inference.py | 1 + 1 file changed, 1 insertion(+) diff --git a/scripts/realtime_inference.py b/scripts/realtime_inference.py index 4a12be7..579b050 100644 --- a/scripts/realtime_inference.py +++ b/scripts/realtime_inference.py @@ -235,6 +235,7 @@ class Avatar: cv2.imwrite(f"{self.avatar_path}/tmp/{str(self.idx).zfill(8)}.png", combine_frame) self.idx = self.idx + 1 + @torch.no_grad() def inference(self, audio_path, out_vid_name, fps, skip_save_images): os.makedirs(self.avatar_path + '/tmp', exist_ok=True) print("start inference")