Deep learning: how to use TensorBoard add_graph in PointPillars


I have just started learning 3D point-cloud based object detection. The algorithm is PointPillars, and I want to use TensorBoard to view the network structure, but an error occurs. Below is the code of the network model:

import torch
from torch import nn
from torch.nn import functional as F

# change_default_args and Empty are helper utilities from the second.pytorch
# (torchplus) code base that the original implementation imports.

class PFNLayer(nn.Module):
    def __init__(self,
                 in_channels,
                 out_channels,
                 use_norm=True,
                 last_layer=False):
        """
        Pillar Feature Net Layer.
        The Pillar Feature Net could be composed of a series of these layers, but the PointPillars paper results only
        used a single PFNLayer. This layer performs a similar role as second.pytorch.voxelnet.VFELayer.
        :param in_channels: <int>. Number of input channels.
        :param out_channels: <int>. Number of output channels.
        :param use_norm: <bool>. Whether to include BatchNorm.
        :param last_layer: <bool>. If last_layer, there is no concatenation of features.
        """
        super().__init__()
        self.name = 'PFNLayer'
        self.last_vfe = last_layer
        if not self.last_vfe:
            out_channels = out_channels // 2
        self.units = out_channels

        if use_norm:
            BatchNorm1d = change_default_args(eps=1e-3, momentum=0.01)(nn.BatchNorm1d)
            Linear = change_default_args(bias=False)(nn.Linear)
        else:
            BatchNorm1d = Empty
            Linear = change_default_args(bias=True)(nn.Linear)

        self.linear = Linear(in_channels, self.units)
        self.norm = BatchNorm1d(self.units)

    def forward(self, inputs):
        x = self.linear(inputs)
        x = self.norm(x.permute(0, 2, 1).contiguous()).permute(0, 2, 1).contiguous()
        x = F.relu(x)

        # max-pool over the points in each pillar
        x_max = torch.max(x, dim=1, keepdim=True)[0]

        if self.last_vfe:
            return x_max
        else:
            x_repeat = x_max.repeat(1, inputs.shape[1], 1)
            x_concatenated = torch.cat([x, x_repeat], dim=2)
            return x_concatenated
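
For reference, my understanding is that add_graph is normally called once from outside the model, with a SummaryWriter and an example input, roughly like the sketch below. The dummy tensor layout (num_pillars, points_per_pillar, in_channels) and the 9/64 channel numbers are my assumptions based on how forward indexes its input and on the paper's default config, not values taken from my actual pipeline:

import torch
from torch.utils.tensorboard import SummaryWriter

# Sketch only: graph a standalone PFNLayer with a made-up pillar tensor.
layer = PFNLayer(in_channels=9, out_channels=64, use_norm=True, last_layer=True)
dummy = torch.randn(100, 32, 9)  # assumed: 100 pillars, 32 points per pillar, 9 features

with SummaryWriter(comment='PFNLayer_standalone') as w:
    w.add_graph(layer, input_to_model=dummy, verbose=False)

(This standalone call still needs change_default_args and Empty importable, since __init__ uses them.)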
To dump the graph, I renamed the original forward to forward1 and added a forward wrapper that calls add_graph and then delegates to it:

from torch.utils.tensorboard import SummaryWriter

class PFNLayer(nn.Module):
    def __init__(self,
                 in_channels,
                 out_channels,
                 use_norm=True,
                 last_layer=False):
        """
        Pillar Feature Net Layer.
        The Pillar Feature Net could be composed of a series of these layers, but the PointPillars paper results only
        used a single PFNLayer. This layer performs a similar role as second.pytorch.voxelnet.VFELayer.
        :param in_channels: <int>. Number of input channels.
        :param out_channels: <int>. Number of output channels.
        :param use_norm: <bool>. Whether to include BatchNorm.
        :param last_layer: <bool>. If last_layer, there is no concatenation of features.
        """
        super().__init__()
        self.name = 'PFNLayer'
        self.last_vfe = last_layer
        if not self.last_vfe:
            out_channels = out_channels // 2
        self.units = out_channels

        if use_norm:
            BatchNorm1d = change_default_args(eps=1e-3, momentum=0.01)(nn.BatchNorm1d)
            Linear = change_default_args(bias=False)(nn.Linear)
        else:
            BatchNorm1d = Empty
            Linear = change_default_args(bias=True)(nn.Linear)

        self.linear = Linear(in_channels, self.units)
        self.norm = BatchNorm1d(self.units)

    def forward(self, inputs):
        # write the graph every time the layer is called, then run the real forward
        with SummaryWriter(comment='PFNLayer') as w:
            w.add_graph(self, input_to_model=inputs, verbose=False)
        return self.forward1(inputs)

    def forward1(self, inputs):
        x = self.linear(inputs)
        x = self.norm(x.permute(0, 2, 1).contiguous()).permute(0, 2, 1).contiguous()
        x = F.relu(x)

        x_max = torch.max(x, dim=1, keepdim=True)[0]

        if self.last_vfe:
            return x_max
        else:
            x_repeat = x_max.repeat(1, inputs.shape[1], 1)
            x_concatenated = torch.cat([x, x_repeat], dim=2)
            return x_concatenated
Running this produces the following output, and no graph is written:

Tracing can't be nested
Error occurs, No graph saved
Tracing can't be nested
Error occurs, No graph saved
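
From the message, my guess is that the nesting comes from calling add_graph inside forward itself: add_graph traces the module, the trace re-enters forward, and the inner add_graph call then tries to start a second trace. A workaround I am considering, sketched below, is to capture one real input with a forward pre-hook and call add_graph afterwards, outside forward. Here pfn_layer is a placeholder for the layer instance in my pipeline, and grab_input / captured are names I made up:

from torch.utils.tensorboard import SummaryWriter

captured = {}

def grab_input(module, inputs):
    # keep the first real pillar tensor that reaches the layer
    if 'x' not in captured:
        captured['x'] = inputs[0].detach().clone()

handle = pfn_layer.register_forward_pre_hook(grab_input)

# ... run one normal forward pass of the detection pipeline here ...

handle.remove()
with SummaryWriter(comment='PFNLayer') as w:
    w.add_graph(pfn_layer, input_to_model=captured['x'], verbose=False)

That way no add_graph call happens while a trace is already running.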