update ModelSummary #20945

Open · wants to merge 6 commits into master

14 changes: 11 additions & 3 deletions src/lightning/pytorch/utilities/model_summary/model_summary.py
@@ -145,6 +145,13 @@ def training(self) -> bool:
         """Returns whether the module is in training mode."""
         return self._module.training
 
+    @property
+    def requires_grad(self) -> bool:
+        """Returns whether any of the module's parameters require gradients."""
+        if self.num_parameters > 0:
+            return any(param.requires_grad for param in self._module.parameters())
+        return True
+
 
 class ModelSummary:
     """Generates a summary of all layers in a :class:`~lightning.pytorch.core.LightningModule`.
@@ -265,8 +272,8 @@ def param_nums(self) -> list[int]:
         return [layer.num_parameters for layer in self._layer_summary.values()]
 
     @property
-    def training_modes(self) -> list[bool]:
-        return [layer.training for layer in self._layer_summary.values()]
+    def training_modes(self) -> list[int]:
+        return [(2 if layer.training else 1) if layer.requires_grad else 0 for layer in self._layer_summary.values()]
 
     @property
     def total_training_modes(self) -> dict[str, int]:
@@ -361,12 +368,13 @@ def _get_summary_data(self) -> list[tuple[str, list[str]]]:
         Layer Name, Layer Type, Number of Parameters, Input Sizes, Output Sizes, Model Size
 
         """
+        param_mode = {0: "freeze", 1: "eval", 2: "train"}
         arrays = [
             (" ", list(map(str, range(len(self._layer_summary))))),
             ("Name", self.layer_names),
             ("Type", self.layer_types),
             ("Params", list(map(get_human_readable_count, self.param_nums))),
-            ("Mode", ["train" if mode else "eval" for mode in self.training_modes]),
+            ("Mode", [param_mode[mode] for mode in self.training_modes]),
             ("FLOPs", list(map(get_human_readable_count, (sum(x.values()) for x in self.flop_counts.values())))),
         ]
         if self._model.example_input_array is not None:
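A minimal sketch (not part of this PR's diff) of how the updated Mode column would behave with these changes applied: a submodule whose parameters are all frozen is reported as "freeze", while the remaining layers keep the existing "train"/"eval" labels. The LitModel class below is a hypothetical example module, not code from this repository.

```python
import torch
import lightning.pytorch as pl
from lightning.pytorch.utilities.model_summary import ModelSummary


class LitModel(pl.LightningModule):
    """Hypothetical module: a frozen backbone followed by a trainable head."""

    def __init__(self):
        super().__init__()
        self.backbone = torch.nn.Linear(32, 16)
        self.head = torch.nn.Linear(16, 4)
        # Freeze the backbone so none of its parameters require gradients.
        for p in self.backbone.parameters():
            p.requires_grad = False

    def forward(self, x):
        return self.head(self.backbone(x))


model = LitModel()
# With this patch, `training_modes` yields 0 for `backbone` (all parameters
# frozen) and 2 for `head` (trainable and in training mode by default), so the
# printed Mode column reads "freeze" for `backbone` and "train" for `head`.
print(ModelSummary(model, max_depth=1))
```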