
Commit a7788cb

Merge branch 'master' into master

2 parents: 35b5a18 + 0e00ca3

6 files changed: 16 additions & 5 deletions

Makefile

Lines changed: 1 addition & 0 deletions

The new line picks up compiler-generated dependency (.d) files nested one directory level deeper under build/.

@@ -104,3 +104,4 @@ clean:
 -include build/*.d
 -include build/*/*.d
 -include build/*/*/*.d
+-include build/*/*/*/*.d

python/nnvm/frontend/keras.py

Lines changed: 1 addition & 1 deletion

Keras applies softmax over the channel (last) axis of its NHWC tensors; in the NCHW layout the NNVM graph uses, the channel dimension is axis 1, so the softmax symbol is now built with an explicit axis.

@@ -40,7 +40,7 @@ def _convert_activation(insym, keras_layer, _):
         return _sym.__add_scalar__(_sym.__mul_scalar__(insym, \
             scalar=alpha), scalar=beta)
     elif act_type == 'softmax':
-        return _sym.softmax(insym)
+        return _sym.softmax(insym, axis=1)
     elif act_type == 'sigmoid':
         return _sym.sigmoid(insym)
     elif act_type == 'tanh':
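
A minimal NumPy sketch of why the axis matters (illustrative only, not frontend code; the softmax helper below is hypothetical):

import numpy as np

# Softmax normalizes along one axis, so the axis choice changes the
# result for N-D tensors. Subtracting the max stabilizes the exponent.
def softmax(x, axis):
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

x = np.random.randn(1, 3, 32, 32)            # NCHW: channels on axis 1
assert np.allclose(softmax(x, axis=1).sum(axis=1), 1.0)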

src/top/tensor/matrix_op.cc

Lines changed: 2 additions & 2 deletions

The loop indices become uint32_t to match the unsigned return type of TShape::ndim(), silencing signed/unsigned comparison warnings without changing behavior.

@@ -34,8 +34,8 @@ inline bool DotShape(const nnvm::NodeAttrs& attrs,
       << "dot shape inconsistent: " << lshape << " X " << rshape;

   TShape oshape(lshape.ndim() + rshape.ndim() - 2);
-  for (int i = 0; i < lshape.ndim() - 1; i++) oshape[i] = lshape[i];
-  for (int i = 1; i < rshape.ndim(); i++) oshape[i + lshape.ndim() - 2] = rshape[i];
+  for (uint32_t i = 0; i < lshape.ndim() - 1; i++) oshape[i] = lshape[i];
+  for (uint32_t i = 1; i < rshape.ndim(); i++) oshape[i + lshape.ndim() - 2] = rshape[i];

   NNVM_ASSIGN_OUTPUT_SHAPE(attrs, *out_attrs, 0, oshape);
   return true;
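
The shape rule these loops implement is the standard one for dot: the output keeps lshape[:-1] followed by rshape[1:], with the contracted axes required to match. A NumPy sketch of the same rule (illustrative, not the operator itself):

import numpy as np

# dot(L, R) contracts the last axis of L with the first axis of R;
# the output shape is L.shape[:-1] + R.shape[1:].
a = np.random.randn(2, 3, 4)
b = np.random.randn(4, 5)
out = np.tensordot(a, b, axes=1)
assert out.shape == a.shape[:-1] + b.shape[1:]   # (2, 3, 5)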

src/top/tensor/reduce.cc

Lines changed: 1 addition & 1 deletion

Here the loop index dim_t is signed, so inputs[0].ndim() is cast to int to avoid a signed/unsigned comparison in the loop condition.

@@ -145,7 +145,7 @@ Example::
     for (dim_t i = 0; i < param.axis.ndim(); ++i) {
       exclude_axis.insert(param.axis[i]);
     }
-    for (dim_t i = 0; i < inputs[0].ndim(); ++i) {
+    for (dim_t i = 0; i < static_cast<int>(inputs[0].ndim()); ++i) {
      if (exclude_axis.count(i) == 0) {
        axis.push_back(make_const(Int(32), i));
      }
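
For context, the surrounding loop builds the complement of param.axis: with the exclude option, the reduction runs over every axis not listed. In NumPy terms (a sketch, not the operator):

import numpy as np

# Reduce over every axis NOT in `exclude` -- the complement set that
# the loop above collects into `axis`.
x = np.random.randn(2, 3, 4)
exclude = {1}                     # axes to keep
axis = tuple(i for i in range(x.ndim) if i not in exclude)
assert x.sum(axis=axis).shape == (3,)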

src/top/tensor/transform.cc

Lines changed: 1 addition & 1 deletion

The same signed/unsigned fix: param.axes[i] is signed, so input.ndim() is cast to int before the comparison in the CHECK.

@@ -760,7 +760,7 @@ inline bool TransposeCorrectLayout(const NodeAttrs& attrs,
   } else {
     CHECK_EQ(input.ndim(), param.axes.ndim());
     for (size_t i = 0; i < input.ndim(); ++i) {
-      CHECK(param.axes[i] < input.ndim());
+      CHECK(param.axes[i] < static_cast<int>(input.ndim()));
       new_layout << input.at(param.axes[i]);
     }
   }
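
The bound itself is unchanged: every entry of param.axes must index an existing input dimension, the same requirement a NumPy transpose enforces (sketch):

import numpy as np

# Each transpose axis must name a valid input dimension -- the bound
# CHECK(param.axes[i] < ndim) asserts.
x = np.zeros((2, 3, 4))
axes = (0, 2, 1)
assert all(a < x.ndim for a in axes)
print(np.transpose(x, axes).shape)    # (2, 4, 3)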

tests/python/frontend/keras/test_forward.py

Lines changed: 10 additions & 0 deletions

A new test exercises the softmax axis fix end to end and is registered in the __main__ runner.

@@ -59,6 +59,15 @@ def test_forward_elemwise_add():
     verify_keras_frontend(keras_model)


+def test_forward_softmax():
+    data = keras.layers.Input(shape=(32,32,3))
+    x = keras.layers.Activation('softmax')(data)
+    x = keras.layers.Concatenate()([x, x])
+    x = keras.layers.GlobalMaxPooling2D()(x)
+    keras_model = keras.models.Model(data, x)
+    verify_keras_frontend(keras_model)
+
+
 def test_forward_softrelu():
     data = keras.layers.Input(shape=(32,32,3))
     x = keras.layers.Activation('softplus')(data)

@@ -145,6 +154,7 @@ def test_forward_resnet50():

 if __name__ == '__main__':
     test_forward_elemwise_add()
+    test_forward_softmax()
     test_forward_softrelu()
     test_forward_leaky_relu()
     test_forward_dense()
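
What the test checks, in NumPy terms: Keras computes softmax over the last axis of an NHWC tensor, and after the frontend's NHWC-to-NCHW transpose the equivalent operation is softmax over axis 1. A sketch of that equivalence (illustrative; the real end-to-end comparison is done by verify_keras_frontend, and the softmax helper below is hypothetical):

import numpy as np

# NHWC softmax over the last axis equals NCHW softmax over axis 1
# once the layouts are aligned by a transpose.
def softmax(x, axis):
    e = np.exp(x - x.max(axis=axis, keepdims=True))
    return e / e.sum(axis=axis, keepdims=True)

nhwc = np.random.randn(1, 32, 32, 3)
nchw = nhwc.transpose(0, 3, 1, 2)
assert np.allclose(softmax(nhwc, axis=-1).transpose(0, 3, 1, 2),
                   softmax(nchw, axis=1))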
