Source code for layers.conv_bn_relu

import torch.nn as nn
import torch.nn.functional as F


class ConvBNReLU(nn.Module):
    """1D convolution followed by batch normalization and a ReLU activation."""

    def __init__(self, in_channels, out_channels, kernel_size, stride=1,
                 padding=0, dilation=1, groups=1, bias=True):
        super(ConvBNReLU, self).__init__()
        self.kernel_size = kernel_size
        self.conv = nn.Conv1d(in_channels, out_channels, kernel_size, stride,
                              padding, dilation, groups, bias)
        self.bn = nn.BatchNorm1d(out_channels)
        self.relu = F.relu

    def forward(self, x):
        x = self.conv(x)
        x = self.bn(x)
        x = self.relu(x)
        return x
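
A minimal usage sketch, not part of the original module; the tensor sizes are illustrative and follow nn.Conv1d's (batch, channels, length) input convention:

import torch

# Hypothetical example: a block mapping 16 input channels to 32 output channels
# with a kernel of size 3 and same-length padding.
layer = ConvBNReLU(in_channels=16, out_channels=32, kernel_size=3, padding=1)
x = torch.randn(8, 16, 100)   # (batch, channels, length)
y = layer(x)                  # y.shape == (8, 32, 100)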