Skip to content

Commit

Permalink
1. optimize pad before conv op (MPolaris#5)
Browse files Browse the repository at this point in the history
2. fix ConvTranspose op
  • Loading branch information
MPolaris committed Aug 9, 2022
1 parent 5460705 commit 2201622
Showing 1 changed file with 80 additions and 51 deletions.
131 changes: 80 additions & 51 deletions layers/conv_layers.py
Original file line number Diff line number Diff line change
@@ -1,36 +1,43 @@
from . import OPERATOR
'''
Author: MPolaris && yutaka329
Thanks to yutaka329 for the pad tricks.
https://github.com/MPolaris/onnx2tflite/issues/5
'''
import logging
import tensorflow as tf
from tensorflow import keras

from . import OPERATOR

LOG = logging.getLogger("convolution_layers :")
@OPERATOR.register_operator("ConvTranspose")
class TFConvTranspose():
    """Map an ONNX ConvTranspose node onto a channel-last keras Conv2DTranspose.

    NOTE(review): this span was a diff rendering with old and new lines
    interleaved; this is the reconstructed post-commit version (conv with
    'VALID' padding, then Cropping2D to emulate ONNX `pads`).
    """
    def __init__(self, tensor_grap, node_weights, node_inputs, node_attribute, *args, **kwargs) -> None:
        super().__init__()
        dilations, group = node_attribute.get('dilations', 1), node_attribute.get('group', 1)
        pads = node_attribute['pads'] if "pads" in node_attribute else None
        kernel_shape, strides = node_attribute.get('kernel_shape', 1), node_attribute.get('strides', 1)

        # ONNX stores ConvTranspose weights as (in_ch, out_ch, H, W);
        # keras expects (H, W, out_ch, in_ch).
        weights = node_weights[node_inputs[1]].transpose(2, 3, 1, 0)
        bias = node_weights[node_inputs[2]] if len(node_inputs) == 3 else None
        height, width, n_filters, channels = weights.shape
        self.pad = None
        self.conv = keras.layers.Conv2DTranspose(filters=n_filters, kernel_size=(height, width), strides=strides, padding='VALID', use_bias=False if bias is None else True,
                                            kernel_initializer=keras.initializers.Constant(weights),
                                            bias_initializer='zeros' if bias is None else keras.initializers.Constant(bias),
                                            output_padding=0,
                                            dilation_rate=dilations)

        if pads is not None and max(pads) != 0:
            # ONNX `pads` on ConvTranspose SHRINK the output, so they are
            # emulated by cropping after the VALID-padded transposed conv.
            # Only symmetric pads can be expressed with one Cropping2D —
            # raise on asymmetric pads instead of asserting, so the check
            # survives `python -O`.
            if not (len(pads) == 2 or (pads[2] == pads[0] and pads[3] == pads[1])):
                raise ValueError("Asymmetric pads {} are not supported for ConvTranspose.".format(pads))
            self.pad = keras.layers.Cropping2D(pads[:2])

    def __call__(self, inputs):
        # Conv first, then crop away the ONNX padding.
        inputs = self.conv(inputs)
        if self.pad:
            inputs = self.pad(inputs)
        return inputs


@OPERATOR.register_operator("Conv")
Expand Down Expand Up @@ -68,33 +75,38 @@ def __init__(self, in_channel_num, out_channel_num, kernel_size=1,
strides = (strides, strides)
if dilations[0] != 1 and strides[0] != 1:
raise Exception("Currently, specifying any dilation_rate value != 1 is incompatible with specifying any stride value != 1.")
self.conv = keras.layers.Conv2D(
out_channel_num, kernel_size, strides, "VALID", use_bias=False if bias is None else True,
kernel_initializer=keras.initializers.Constant(weights),
bias_initializer='zeros' if bias is None else keras.initializers.Constant(bias),
dilation_rate=dilations)

self.pad =None
if pads is not None and max(pads) != 0:
padding = None
if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
padding = (pads[0], pads[1])
elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
padding = ((pads[0], pads[2]), (pads[1], pads[3]))
self.pad = keras.layers.ZeroPadding2D(padding=padding)
if pads is not None and max(pads) == 1 and max(strides) == 1:
self.conv = keras.layers.Conv2D(
out_channel_num, kernel_size, strides, "SAME", use_bias=False if bias is None else True,
kernel_initializer=keras.initializers.Constant(weights),
bias_initializer='zeros' if bias is None else keras.initializers.Constant(bias),
dilation_rate=dilations)
else:
self.conv = keras.layers.Conv2D(
out_channel_num, kernel_size, strides, "VALID", use_bias=False if bias is None else True,
kernel_initializer=keras.initializers.Constant(weights),
bias_initializer='zeros' if bias is None else keras.initializers.Constant(bias),
dilation_rate=dilations)
if pads is not None and max(pads) != 0:
padding = None
if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
padding = (pads[0], pads[1])
elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
padding = ((pads[0], pads[2]), (pads[1], pads[3]))
self.pad = keras.layers.ZeroPadding2D(padding=padding)

def __call__(self, inputs):
if self.pad:
return self.conv(self.pad(inputs))
else:
return self.conv(inputs)
inputs = self.pad(inputs)
return self.conv(inputs)

class TFGroupConv():
# 分组卷积Group Convolution
def __init__(self, in_channel_num, out_channel_num, kernel_size=1,
strides=1, dilations=1, pads=None, groups=1, weights=None, bias=None):
super().__init__()
filters = weights.shape[-2]
assert groups*filters == out_channel_num, "Input channels and filters must both be divisible by groups."
if isinstance(dilations, int):
dilations = (dilations, dilations)
if isinstance(strides, int):
Expand All @@ -105,7 +117,7 @@ def __init__(self, in_channel_num, out_channel_num, kernel_size=1,
self.groups = groups
out_channel_num = int(out_channel_num//groups)
self.pad =None
if pads is not None and max(pads) != 0:
if pads is not None and (max(pads) != 0 and not (max(pads) == 1 and max(strides) == 1)):
padding = None
if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
padding = (pads[0], pads[1])
Expand All @@ -115,11 +127,18 @@ def __init__(self, in_channel_num, out_channel_num, kernel_size=1,

self.convs = []
for i in range(groups):
self.convs.append(keras.layers.Conv2D(
out_channel_num, kernel_size, strides, 'VALID', use_bias=False if bias is None else True,
if pads is not None and max(pads) == 1 and max(strides) == 1:
self.convs.append(keras.layers.Conv2D(
out_channel_num, kernel_size, strides, 'SAME', use_bias=False if bias is None else True,
dilation_rate=dilations,
kernel_initializer=keras.initializers.Constant(weights[:, :, :, i*out_channel_num:(i+1)*out_channel_num]),
bias_initializer='zeros' if bias is None else keras.initializers.Constant(bias[i*out_channel_num:(i+1)*out_channel_num])))
else:
self.convs.append(keras.layers.Conv2D(
out_channel_num, kernel_size, strides, 'VALID', use_bias=False if bias is None else True,
dilation_rate=dilations,
kernel_initializer=keras.initializers.Constant(weights[:, :, :, i*out_channel_num:(i+1)*out_channel_num]),
bias_initializer='zeros' if bias is None else keras.initializers.Constant(bias[i*out_channel_num:(i+1)*out_channel_num])))

def __call__(self, inputs):
if self.pad is not None:
Expand All @@ -139,25 +158,35 @@ def __init__(self, kernel_size=1, strides=1, dilations=1, pads=None, weights=Non
dilations = (dilations, dilations)
if isinstance(strides, int):
strides = (strides, strides)
self.conv = keras.layers.DepthwiseConv2D(
kernel_size, strides, "VALID", use_bias=False if bias is None else True,
weights=[weights] if bias is None else [weights, bias],
dilation_rate=dilations,
activation=None,
kernel_initializer='zeros',
bias_initializer='zeros'
)

self.pad =None
if pads is not None and max(pads) != 0:
padding = None
if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
padding = (pads[0], pads[1])
elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
padding = ((pads[0], pads[2]), (pads[1], pads[3]))
self.pad = keras.layers.ZeroPadding2D(padding=padding)

if pads is not None and max(pads) == 1 and max(strides) == 1:
self.conv = keras.layers.DepthwiseConv2D(
kernel_size, strides, "SAME", use_bias=False if bias is None else True,
weights=[weights] if bias is None else [weights, bias],
dilation_rate=dilations,
activation=None,
kernel_initializer='zeros',
bias_initializer='zeros'
)
else:
self.conv = keras.layers.DepthwiseConv2D(
kernel_size, strides, "VALID", use_bias=False if bias is None else True,
weights=[weights] if bias is None else [weights, bias],
dilation_rate=dilations,
activation=None,
kernel_initializer='zeros',
bias_initializer='zeros'
)
if pads is not None and max(pads) != 0:
padding = None
if len(pads) == 2 and (pads[0] > 0 or pads[1] > 0):
padding = (pads[0], pads[1])
elif len(pads) == 4 and (pads[0] > 0 or pads[1] > 0 or pads[2] > 0 or pads[3] > 0):
padding = ((pads[0], pads[2]), (pads[1], pads[3]))
self.pad = keras.layers.ZeroPadding2D(padding=padding)

def __call__(self, inputs):
if self.pad:
return self.conv(self.pad(inputs))
else:
return self.conv(inputs)
inputs = self.pad(inputs)
return self.conv(inputs)

0 comments on commit 2201622

Please sign in to comment.