Skip to content

Commit

Permalink
fixed redundant code in TRT Interpreter
Browse files · Browse the repository at this point in the history
  • Loading branch information
cehongwang committed Feb 14, 2025
1 parent 2368e63 commit 7fe4343
Showing 1 changed file with 3 additions and 10 deletions.
13 changes: 3 additions & 10 deletions py/torch_tensorrt/dynamo/conversion/_TRTInterpreter.py
Original file line number Diff line number Diff line change
Expand Up @@ -33,7 +33,9 @@
from torch_tensorrt.dynamo.conversion._ConverterRegistry import (
DYNAMO_CONVERTERS as CONVERTERS,
)
from torch_tensorrt.dynamo.conversion._ConverterRegistry import CallingConvention
from torch_tensorrt.dynamo.conversion._ConverterRegistry import (
CallingConvention,
)
from torch_tensorrt.dynamo.conversion._TRTBuilderMonitor import TRTBulderMonitor
from torch_tensorrt.dynamo.conversion.converter_utils import (
get_node_io,
Expand Down Expand Up @@ -740,10 +742,6 @@ def run(
def run_node(self, n: torch.fx.Node) -> torch.fx.Node:
self._cur_node_name = get_node_name(n)
self._cur_node = n
# add "_itensor_to_tensor_meta"
kwargs = dict(n.kwargs)
kwargs["_itensor_to_tensor_meta"] = self._itensor_to_tensor_meta
n.kwargs = kwargs

if _LOGGER.isEnabledFor(logging.DEBUG):
_LOGGER.debug(
Expand All @@ -759,11 +757,6 @@ def run_node(self, n: torch.fx.Node) -> torch.fx.Node:
f"Converted node {self._cur_node_name} [{n.target}] ({get_node_io(n, self.const_mapping)})"
)

# remove "_itensor_to_tensor_meta"
kwargs = dict(n.kwargs)
del kwargs["_itensor_to_tensor_meta"]
n.kwargs = kwargs

if isinstance(trt_node, trt.ITensor):
self._itensor_to_tensor_meta[trt_node] = n.meta.get("tensor_meta")

Expand Down

0 comments on commit 7fe4343

Please sign in to comment.