From 4a99a0ac273956a7f8e6608e71aafbb5202fcca8 Mon Sep 17 00:00:00 2001
From: 游雁 <zhifu.gzf@alibaba-inc.com>
Date: Sat, 11 May 2024 21:55:14 +0800
Subject: [PATCH] sensevoice sanm

---
 funasr/train_utils/model_summary.py |   14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)

diff --git a/funasr/train_utils/model_summary.py b/funasr/train_utils/model_summary.py
index 8d7f14f..4e92a33 100644
--- a/funasr/train_utils/model_summary.py
+++ b/funasr/train_utils/model_summary.py
@@ -1,4 +1,3 @@
-import humanfriendly
 import numpy as np
 import torch
 
@@ -48,6 +47,8 @@
 def model_summary(model: torch.nn.Module) -> str:
     message = "Model structure:\n"
     message += str(model)
+    # for p in model.parameters():
+    #     print(f"{p.numel()}")
     tot_params = sum(p.numel() for p in model.parameters())
     num_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
     percent_trainable = "{:.1f}".format(num_params * 100.0 / tot_params)
@@ -56,15 +57,8 @@
     message += "\n\nModel summary:\n"
     message += f"    Class Name: {model.__class__.__name__}\n"
     message += f"    Total Number of model parameters: {tot_params}\n"
-    message += (
-        f"    Number of trainable parameters: {num_params} ({percent_trainable}%)\n"
-    )
-    num_bytes = humanfriendly.format_size(
-        sum(
-            p.numel() * to_bytes(p.dtype) for p in model.parameters() if p.requires_grad
-        )
-    )
-    message += f"    Size: {num_bytes}\n"
+    message += f"    Number of trainable parameters: {num_params} ({percent_trainable}%)\n"
+
     dtype = next(iter(model.parameters())).dtype
     message += f"    Type: {dtype}"
     return message

--
Gitblit v1.9.1