Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
3 changes: 2 additions & 1 deletion CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -108,6 +108,7 @@ To release a new version, please update the changelog as followed:
- Fix `tf.models.Model._construct_graph` for list of outputs, e.g. STN case (PR #1010)
- Enable better `in_channels` exception raise. (PR #1015)
- Set allow_pickle=True in np.load() (PR #1021)
- Remove `private_method` decorator (PR #1025)

### Removed

Expand All @@ -116,7 +117,7 @@ To release a new version, please update the changelog as followed:
### Contributors

- @zsdonghao
- @ChrisWu1997: #1010 #1015
- @ChrisWu1997: #1010 #1015 #1025
- @warshallrho: #1017 #1021
- @ArnoldLIULJ: #1023
- @JingqingZ: #1023
Expand Down
7 changes: 0 additions & 7 deletions tensorlayer/layers/convolution/deformable_conv.py
Original file line number Diff line number Diff line change
Expand Up @@ -235,26 +235,22 @@ def forward(self, inputs):
outputs = self.act(outputs)
return outputs

@private_method
def _to_bc_h_w(self, x, x_shape):
    """Fold the channel axis into the batch axis: (b, h, w, c) -> (b*c, h, w).

    ``x_shape`` supplies the static spatial sizes (h at index 1, w at index 2).
    """
    # Bring channels ahead of the spatial dimensions, then merge them into batch.
    channels_first = tf.transpose(a=x, perm=[0, 3, 1, 2])
    return tf.reshape(channels_first, (-1, x_shape[1], x_shape[2]))

@private_method
def _to_b_h_w_n_c(self, x, x_shape):
    """Unfold batch-merged channels: (b*c, h, w, n) -> (b, h, w, n, c).

    ``x_shape`` supplies the target sizes: h (index 1), w (index 2),
    n (index 3) and c (index 4).
    """
    # Split the fused (b*c) leading axis into (b, c), then move c to the end.
    split = tf.reshape(x, (-1, x_shape[4], x_shape[1], x_shape[2], x_shape[3]))
    return tf.transpose(a=split, perm=[0, 2, 3, 4, 1])

@private_method
def tf_flatten(self, a):
    """Collapse tensor ``a`` into a rank-1 tensor."""
    flat_shape = [-1]
    return tf.reshape(a, flat_shape)

@private_method
def _get_vals_by_coords(self, inputs, coords, idx, out_shape):
indices = tf.stack(
[idx, self.tf_flatten(coords[:, :, :, :, 0]),
Expand All @@ -264,7 +260,6 @@ def _get_vals_by_coords(self, inputs, coords, idx, out_shape):
vals = tf.reshape(vals, out_shape)
return vals

@private_method
def _tf_repeat(self, a, repeats):
"""Tensorflow version of np.repeat for 1D"""
# https://github.com/tensorflow/tensorflow/issues/8521
Expand All @@ -277,7 +272,6 @@ def _tf_repeat(self, a, repeats):
a = self.tf_flatten(a)
return a

@private_method
def _tf_batch_map_coordinates(self, inputs, coords):
"""Batch version of tf_map_coordinates

Expand Down Expand Up @@ -324,7 +318,6 @@ def _tf_batch_map_coordinates(self, inputs, coords):

return mapped_vals

@private_method
def _tf_batch_map_offsets(self, inputs, offsets, grid_offset):
"""Batch map offsets into input

Expand Down
2 changes: 0 additions & 2 deletions tensorlayer/layers/convolution/super_resolution.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,6 @@ def forward(self, inputs):
outputs = self.act(outputs)
return outputs

@private_method
def _PS(self, I, r):
X = tf.transpose(a=I, perm=[2, 1, 0]) # (r, w, b)
X = tf.batch_to_space(input=X, block_shape=[r], crops=[[0, 0]]) # (1, r*w, b)
Expand Down Expand Up @@ -188,7 +187,6 @@ def forward(self, inputs):
outputs = self.act(outputs)
return outputs

@private_method
def _PS(self, X, r, n_out_channels):

_err_log = "SubpixelConv2d: The number of input channels == (scale x scale) x The number of output channels"
Expand Down