@@ -85,24 +85,21 @@ def __init__(
         if act is None:
             act = tf.identity
+
         if W_init_args is None:
             W_init_args = {}
         if b_init_args is None:
             b_init_args = {}
 
         with tf.variable_scope(name):
             W = tf.get_variable(name='W_conv1d', shape=shape, initializer=W_init, dtype=LayersConfig.tf_dtype, **W_init_args)
-            self.outputs = tf.nn.convolution(
-                self.inputs, W, strides=(stride,), padding=padding, dilation_rate=(dilation_rate,), data_format=data_format)  # 1.2
+            self.outputs = tf.nn.convolution(self.inputs, W, strides=(stride,), padding=padding, dilation_rate=(dilation_rate,))  # 1.2
             if b_init:
                 b = tf.get_variable(name='b_conv1d', shape=(shape[-1]), initializer=b_init, dtype=LayersConfig.tf_dtype, **b_init_args)
                 self.outputs = self.outputs + b
 
             self.outputs = act(self.outputs)
 
-        # self.all_layers = list(layer.all_layers)
-        # self.all_params = list(layer.all_params)
-        # self.all_drop = dict(layer.all_drop)
         self.all_layers.append(self.outputs)
         if b_init:
             self.all_params.extend([W, b])
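For reference, a minimal standalone sketch (assuming TensorFlow 1.x, not code from this commit) of the `tf.nn.convolution` call this hunk rewrites; with rank-3 inputs the op performs a 1-D convolution and defaults to `NWC` layout, which is why the explicit `data_format` argument can be dropped:

    import numpy as np
    import tensorflow as tf

    x = tf.placeholder(tf.float32, (None, 100, 1))      # (batch, width, in_channels)
    W = tf.get_variable('W_conv1d', shape=(5, 1, 32),   # (filter_size, in_channels, n_filter)
                        initializer=tf.truncated_normal_initializer(stddev=0.02))
    # no data_format given: NWC is assumed for rank-3 inputs
    y = tf.nn.convolution(x, W, strides=(1,), padding='SAME', dilation_rate=(1,))

    with tf.Session() as sess:
        sess.run(tf.global_variables_initializer())
        print(sess.run(y, {x: np.zeros((4, 100, 1), np.float32)}).shape)  # (4, 100, 32)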
@@ -1260,22 +1257,7 @@ def deconv2d_bilinear_upsampling_initializer(shape):
     return bilinear_weights_init
 
 
-@deprecated_alias(layer='prev_layer', end_support_version=1.9)  # TODO remove this line for the 1.9 release
-def conv1d(
-        prev_layer,
-        n_filter=32,
-        filter_size=5,
-        stride=1,
-        dilation_rate=1,
-        act=tf.identity,
-        padding='SAME',
-        data_format="NWC",
-        W_init=tf.truncated_normal_initializer(stddev=0.02),
-        b_init=tf.constant_initializer(value=0.0),
-        W_init_args=None,
-        b_init_args=None,
-        name='conv1d',
-):
+class Conv1d(Layer):
     """Simplified version of :class:`Conv1dLayer`.
 
     Parameters
@@ -1301,17 +1283,12 @@ def conv1d(
     b_init : initializer or None
         The initializer for the bias vector. If None, skip biases.
     W_init_args : dictionary
-        The arguments for the weight matrix initializer.
+        The arguments for the weight matrix initializer (deprecated).
     b_init_args : dictionary
-        The arguments for the bias vector initializer.
+        The arguments for the bias vector initializer (deprecated).
     name : str
         A unique layer name
 
-    Returns
-    -------
-    :class:`Layer`
-        A :class:`Conv1dLayer` object.
-
     Examples
     ---------
     >>> x = tf.placeholder(tf.float32, (batch_size, width))
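Since `W_init_args` and `b_init_args` are now deprecated (the class delegates to `tf.layers.Conv1D`, which has no equivalent of extra `tf.get_variable` keyword arguments), any customization moves onto the initializer objects themselves. A hypothetical sketch, assuming the TensorLayer 1.8.x API:

    import tensorflow as tf
    import tensorlayer as tl

    x = tf.placeholder(tf.float32, (None, 100, 1))
    net = tl.layers.InputLayer(x, name='in')
    # instead of W_init_args={...} / b_init_args={...}, configure the initializers directly
    net = tl.layers.Conv1d(net, n_filter=64, filter_size=3,
                           W_init=tf.truncated_normal_initializer(stddev=0.01),
                           b_init=tf.constant_initializer(value=0.1),
                           name='c1')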
@@ -1331,25 +1308,67 @@ def conv1d(
 
     """
 
-    if W_init_args is None:
-        W_init_args = {}
-    if b_init_args is None:
-        b_init_args = {}
+    @deprecated_alias(layer='prev_layer', end_support_version=1.9)  # TODO remove this line for the 1.9 release
+    def __init__(self,
+                 prev_layer,
+                 n_filter=32,
+                 filter_size=5,
+                 stride=1,
+                 dilation_rate=1,
+                 act=tf.identity,
+                 padding='SAME',
+                 data_format="channels_last",
+                 W_init=tf.truncated_normal_initializer(stddev=0.02),
+                 b_init=tf.constant_initializer(value=0.0),
+                 W_init_args=None,
+                 b_init_args=None,
+                 name='conv1d'):
 
-    return Conv1dLayer(
-        prev_layer=prev_layer,
-        act=act,
-        shape=(filter_size, int(prev_layer.outputs.get_shape()[-1]), n_filter),
-        stride=stride,
-        dilation_rate=dilation_rate,
-        padding=padding,
-        data_format=data_format,
-        W_init=W_init,
-        b_init=b_init,
-        W_init_args=W_init_args,
-        b_init_args=b_init_args,
-        name=name,
-    )
+        super(Conv1d, self).__init__(prev_layer=prev_layer, name=name)
+        logging.info("Conv1d %s: n_filter:%d filter_size:%s stride:%d pad:%s act:%s dilation_rate:%d" %
+                     (name, n_filter, filter_size, stride, padding, act.__name__, dilation_rate))
+
+        self.inputs = prev_layer.outputs
+        if tf.__version__ > '1.3':
+            con1d = tf.layers.Conv1D(
+                filters=n_filter,
+                kernel_size=filter_size,
+                strides=stride,
+                padding=padding,
+                data_format=data_format,
+                dilation_rate=dilation_rate,
+                activation=act,
+                use_bias=(True if b_init else False),
+                kernel_initializer=W_init,
+                bias_initializer=b_init,
+                name=name)
+            # con1d.dtype = LayersConfig.tf_dtype  # unsupported; the layer uses the same dtype as its inputs
+            self.outputs = con1d(self.inputs)
+            new_variables = con1d.weights  # new_variables = tf.get_collection(TF_GRAPHKEYS_VARIABLES, scope=vs.name)
+            self.all_layers.append(self.outputs)
+            self.all_params.extend(new_variables)
+        else:
+            raise RuntimeError("please update TF > 1.3 or downgrade TL < 1.8.4")
 
 
 # TODO: DeConv1d
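One caveat with the new guard: `tf.__version__ > '1.3'` compares version strings lexicographically, so for example `'1.10' > '1.3'` is False even though 1.10 is the newer release. A version-aware check (a sketch, not part of this commit) avoids that:

    from distutils.version import LooseVersion
    import tensorflow as tf

    # LooseVersion('1.10') > LooseVersion('1.3') is True, unlike the raw string comparison
    if LooseVersion(tf.__version__) > LooseVersion('1.3'):
        pass  # build with the tf.layers.Conv1D backend
    else:
        raise RuntimeError("please update TF > 1.3 or downgrade TL < 1.8.4")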
@@ -1682,9 +1701,6 @@ def __init__(self,
         )
         new_variables = tf.get_collection(TF_GRAPHKEYS_VARIABLES, scope=vs.name)
 
-        # self.all_layers = list(layer.all_layers)
-        # self.all_params = list(layer.all_params)
-        # self.all_drop = dict(layer.all_drop)
         self.all_layers.append(self.outputs)
         self.all_params.extend(new_variables)
@@ -2010,6 +2026,6 @@ def __init__(
 
 # Alias
 AtrousConv1dLayer = atrous_conv1d
-Conv1d = conv1d
+# Conv1d = conv1d
 # Conv2d = conv2d
 # DeConv2d = deconv2d
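Because the lowercase `conv1d` factory is removed and the `Conv1d = conv1d` alias is commented out, code that still calls `tl.layers.conv1d(...)` will break. A hypothetical compatibility shim (not part of this commit) could forward the old name to the new class:

    def conv1d(*args, **kwargs):
        # deprecated entry point; Conv1d is now a Layer subclass
        return Conv1d(*args, **kwargs)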