From 5b94675f62c8e6e9978c01cb46004f703ef6f0a1 Mon Sep 17 00:00:00 2001
From: orbisai0security
Date: Mon, 29 Dec 2025 13:25:05 +0000
Subject: [PATCH] fix: resolve critical vulnerability V-005

Automatically generated security fix
---
 cosyvoice/cli/frontend.py | 2 +-
 cosyvoice/cli/model.py    | 6 +++---
 2 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/cosyvoice/cli/frontend.py b/cosyvoice/cli/frontend.py
index 7ad6f7c..6d397cc 100644
--- a/cosyvoice/cli/frontend.py
+++ b/cosyvoice/cli/frontend.py
@@ -47,7 +47,7 @@ class CosyVoiceFrontEnd:
                                                         providers=["CUDAExecutionProvider" if torch.cuda.is_available() else "CPUExecutionProvider"])
         if os.path.exists(spk2info):
-            self.spk2info = torch.load(spk2info, map_location=self.device)
+            self.spk2info = torch.load(spk2info, map_location=self.device, weights_only=True)
         else:
             self.spk2info = {}
         self.allowed_special = allowed_special
diff --git a/cosyvoice/cli/model.py b/cosyvoice/cli/model.py
index 6bc3b31..a014dd4 100644
--- a/cosyvoice/cli/model.py
+++ b/cosyvoice/cli/model.py
@@ -62,12 +62,12 @@ class CosyVoiceModel:
         self.hift_cache_dict = {}

     def load(self, llm_model, flow_model, hift_model):
-        self.llm.load_state_dict(torch.load(llm_model, map_location=self.device), strict=True)
+        self.llm.load_state_dict(torch.load(llm_model, map_location=self.device, weights_only=True), strict=True)
         self.llm.to(self.device).eval()
-        self.flow.load_state_dict(torch.load(flow_model, map_location=self.device), strict=True)
+        self.flow.load_state_dict(torch.load(flow_model, map_location=self.device, weights_only=True), strict=True)
         self.flow.to(self.device).eval()
         # in case hift_model is a hifigan model
-        hift_state_dict = {k.replace('generator.', ''): v for k, v in torch.load(hift_model, map_location=self.device).items()}
+        hift_state_dict = {k.replace('generator.', ''): v for k, v in torch.load(hift_model, map_location=self.device, weights_only=True).items()}
         self.hift.load_state_dict(hift_state_dict, strict=True)
         self.hift.to(self.device).eval()