From 0d2f2734024ccd1c44aead7608214b924bffbebe Mon Sep 17 00:00:00 2001 From: Emmanuel Ferdman Date: Tue, 10 Jun 2025 13:03:23 +0300 Subject: [PATCH] Resolve Python Logger warnings (#2379) Signed-off-by: Emmanuel Ferdman --- GPT_SoVITS/AR/modules/optim.py | 6 +++--- GPT_SoVITS/utils.py | 4 ++-- api.py | 4 ++-- 3 files changed, 7 insertions(+), 7 deletions(-) diff --git a/GPT_SoVITS/AR/modules/optim.py b/GPT_SoVITS/AR/modules/optim.py index aeebbee..fb87848 100644 --- a/GPT_SoVITS/AR/modules/optim.py +++ b/GPT_SoVITS/AR/modules/optim.py @@ -354,7 +354,7 @@ class ScaledAdam(BatchedOptimizer): if ans < 1.0: first_state["num_clipped"] += 1 if ans < 0.1: - logging.warn(f"Scaling gradients by {ans}, model_norm_threshold={model_norm_threshold}") + logging.warning(f"Scaling gradients by {ans}, model_norm_threshold={model_norm_threshold}") if self.show_dominant_parameters: assert p.shape[0] == len(param_names) self._show_gradient_dominating_parameter(tuples, tot_sumsq) @@ -362,7 +362,7 @@ class ScaledAdam(BatchedOptimizer): def _show_gradient_dominating_parameter(self, tuples: List[Tuple[Tensor, dict, List[str]]], tot_sumsq: Tensor): """ - Show information of parameter wihch dominanting tot_sumsq. + Show information of parameter which dominates tot_sumsq. 
Args: tuples: a list of tuples of (param, state, param_names) @@ -415,7 +415,7 @@ class ScaledAdam(BatchedOptimizer): dominant_grad, ) = sorted_by_proportion[dominant_param_name] logging.info( - f"Parameter Dominanting tot_sumsq {dominant_param_name}" + f"Parameter Dominating tot_sumsq {dominant_param_name}" f" with proportion {dominant_proportion:.2f}," f" where dominant_sumsq=(grad_sumsq*orig_rms_sq)" f"={dominant_sumsq:.3e}," diff --git a/GPT_SoVITS/utils.py b/GPT_SoVITS/utils.py index 14955fd..08e1838 100644 --- a/GPT_SoVITS/utils.py +++ b/GPT_SoVITS/utils.py @@ -283,7 +283,7 @@ def get_hparams_from_file(config_path): def check_git_hash(model_dir): source_dir = os.path.dirname(os.path.realpath(__file__)) if not os.path.exists(os.path.join(source_dir, ".git")): - logger.warn( + logger.warning( "{} is not a git repository, therefore hash value comparison will be ignored.".format( source_dir, ) @@ -296,7 +296,7 @@ def check_git_hash(model_dir): if os.path.exists(path): saved_hash = open(path).read() if saved_hash != cur_hash: - logger.warn( + logger.warning( "git hash values are different. {}(saved) != {}(current)".format( saved_hash[:8], cur_hash[:8], diff --git a/api.py b/api.py index 164a1f1..7354ff7 100644 --- a/api.py +++ b/api.py @@ -1071,10 +1071,10 @@ default_refer = DefaultRefer(args.default_refer_path, args.default_refer_text, a # 模型路径检查 if sovits_path == "": sovits_path = g_config.pretrained_sovits_path - logger.warn(f"未指定SoVITS模型路径, fallback后当前值: {sovits_path}") + logger.warning(f"未指定SoVITS模型路径, fallback后当前值: {sovits_path}") if gpt_path == "": gpt_path = g_config.pretrained_gpt_path - logger.warn(f"未指定GPT模型路径, fallback后当前值: {gpt_path}") + logger.warning(f"未指定GPT模型路径, fallback后当前值: {gpt_path}") # 指定默认参考音频, 调用方 未提供/未给全 参考音频参数时使用 if default_refer.path == "" or default_refer.text == "" or default_refer.language == "":