From 4b388768d0cac2866ddda4cc07e74d8b1d06b65f Mon Sep 17 00:00:00 2001
From: zhaomingwork <61895407+zhaomingwork@users.noreply.github.com>
Date: Thu, 23 May 2024 17:34:52 +0800
Subject: [PATCH] cpp http post server support (#1739)
---
funasr/train_utils/model_summary.py | 14 ++++----------
 1 file changed, 4 insertions(+), 10 deletions(-)
diff --git a/funasr/train_utils/model_summary.py b/funasr/train_utils/model_summary.py
index 8d7f14f..4e92a33 100644
--- a/funasr/train_utils/model_summary.py
+++ b/funasr/train_utils/model_summary.py
@@ -1,4 +1,3 @@
-import humanfriendly
import numpy as np
import torch
@@ -48,6 +47,8 @@
def model_summary(model: torch.nn.Module) -> str:
message = "Model structure:\n"
message += str(model)
+ # for p in model.parameters():
+ # print(f"{p.numel()}")
tot_params = sum(p.numel() for p in model.parameters())
num_params = sum(p.numel() for p in model.parameters() if p.requires_grad)
percent_trainable = "{:.1f}".format(num_params * 100.0 / tot_params)
@@ -56,15 +57,8 @@
message += "\n\nModel summary:\n"
message += f" Class Name: {model.__class__.__name__}\n"
message += f" Total Number of model parameters: {tot_params}\n"
- message += (
- f" Number of trainable parameters: {num_params} ({percent_trainable}%)\n"
- )
- num_bytes = humanfriendly.format_size(
- sum(
- p.numel() * to_bytes(p.dtype) for p in model.parameters() if p.requires_grad
- )
- )
- message += f" Size: {num_bytes}\n"
+ message += f" Number of trainable parameters: {num_params} ({percent_trainable}%)\n"
+
dtype = next(iter(model.parameters())).dtype
message += f" Type: {dtype}"
return message
--
Gitblit v1.9.1