Skip to content

Pass detailed unique layer name to the TRT engine #2087

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Closed
wants to merge 3 commits into from
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
26 changes: 25 additions & 1 deletion py/torch_tensorrt/dynamo/fx_ts_compat/fx2trt.py
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import logging
import re
import warnings
from datetime import datetime
from packaging import version
Expand Down Expand Up @@ -291,8 +292,31 @@ def run(
engine, self._input_names, self._output_names, serialized_cache
)

def get_node_name(self, node):
    """Build a detailed, unique name for *node* to pass to the TRT engine.

    ``nn_module_stack`` in ``node.meta`` preserves the call stack of the
    PyTorch ``nn.Module``s that produced this node, which shows exactly
    where the op sits in the network architecture.  The resulting name is
    ``<module/path>/<node>`` when the stack is available, otherwise just
    ``str(node)``.

    Args:
        node: an ``fx.Node`` (or any object with a ``meta`` dict and a
            meaningful ``str()``).

    Returns:
        str: hierarchical layer name for the TRT engine.
    """
    stack_item = node.meta.get("nn_module_stack", None)
    # The current node corresponds to the LAST entry in the stack.
    # Read it non-destructively: the original popitem() mutated
    # node.meta, silently consuming the metadata for later passes.
    mod_stack = next(reversed(stack_item.items())) if stack_item else ""
    node_name = str(node)
    if mod_stack:
        # Stack keys use "___" as a hierarchy separator; turn it into "/".
        mod_name = str(mod_stack[0]).replace("___", "/")
        # Strip the leading "...__self" prefix that FX prepends.
        mod_name = re.sub(r"^.*__self", "", mod_name)
        # Turn a trailing "_<idx>" (e.g. Sequential index) into "/<idx>".
        mod_name = re.sub(r"_(\d+)$", r"/\g<1>", mod_name)
        node_name = mod_name + "/" + node_name
    # NOTE(review): when nn_module_stack is absent we fall back to the
    # bare node name; node.meta['source_fn'] could be an alternative
    # source of module info — TODO confirm and implement if needed.

    _LOGGER.debug(f"Node meta name {node_name}")
    return node_name

def run_node(self, n):
self._cur_node_name = str(n)
self._cur_node_name = self.get_node_name(n)
# add "_itensor_to_tensor_meta"
kwargs = dict(n.kwargs)
kwargs["_itensor_to_tensor_meta"] = self._itensor_to_tensor_meta
Expand Down
22 changes: 21 additions & 1 deletion py/torch_tensorrt/fx/fx2trt.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
import logging
import os
import re
import warnings
from datetime import datetime
from typing import Any, Callable, Dict, List, NamedTuple, Optional, Sequence
Expand Down Expand Up @@ -270,8 +271,27 @@ def run(
engine, self._input_names, self._output_names, serialized_cache
)

def get_node_name(self, node):
    """Build a detailed, unique name for *node* to pass to the TRT engine.

    ``nn_module_stack`` in ``node.meta`` preserves the call stack of the
    PyTorch ``nn.Module``s that produced this node, which shows exactly
    where the op sits in the network architecture.  The resulting name is
    ``<module/path>/<node>`` when the stack is available, otherwise just
    ``str(node)``.

    Args:
        node: an ``fx.Node`` (or any object with a ``meta`` dict and a
            meaningful ``str()``).

    Returns:
        str: hierarchical layer name for the TRT engine.
    """
    stack_item = node.meta.get("nn_module_stack", None)
    # The current node corresponds to the LAST entry in the stack.
    # Read it non-destructively: the original popitem() mutated
    # node.meta, silently consuming the metadata for later passes.
    mod_stack = next(reversed(stack_item.items())) if stack_item else ""
    node_name = str(node)
    if mod_stack:
        # Stack keys use "___" as a hierarchy separator; turn it into "/".
        mod_name = str(mod_stack[0]).replace("___", "/")
        # Strip the leading "...__self" prefix that FX prepends.
        mod_name = re.sub(r"^.*__self", "", mod_name)
        # Turn a trailing "_<idx>" (e.g. Sequential index) into "/<idx>".
        mod_name = re.sub(r"_(\d+)$", r"/\g<1>", mod_name)
        node_name = mod_name + "/" + node_name

    _LOGGER.debug(f"Node meta name {node_name}")
    return node_name

def run_node(self, n):
self._cur_node_name = str(n)
self._cur_node_name = self.get_node_name(n)
# add "_itensor_to_tensor_meta"
kwargs = dict(n.kwargs)
kwargs["_itensor_to_tensor_meta"] = self._itensor_to_tensor_meta
Expand Down