import humanfriendly
import numpy as np
import torch
| | |
| | | message += "\n\nModel summary:\n" |
| | | message += f" Class Name: {model.__class__.__name__}\n" |
| | | message += f" Total Number of model parameters: {tot_params}\n" |
| | | message += ( |
| | | f" Number of trainable parameters: {num_params} ({percent_trainable}%)\n" |
| | | ) |
| | | num_bytes = humanfriendly.format_size( |
| | | sum( |
| | | p.numel() * to_bytes(p.dtype) for p in model.parameters() if p.requires_grad |
| | | ) |
| | | ) |
| | | message += f" Size: {num_bytes}\n" |
| | | message += f" Number of trainable parameters: {num_params} ({percent_trainable}%)\n" |
| | | |
| | | dtype = next(iter(model.parameters())).dtype |
| | | message += f" Type: {dtype}" |
| | | return message |