Skip to content

Commit 39c8bc2

Browse files
sergei-mironov authored and tqchen committed
[TOPI] Specify non-zero absolute tolerance in tests (apache#1925)
1 parent be9784c commit 39c8bc2

125 files changed

Lines changed: 330 additions & 316 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below to find content that may be hidden.

apps/extension/tests/test_ext.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ def check_llvm():
2222
a = tvm.nd.array(np.random.uniform(size=n).astype(A.dtype), ctx)
2323
b = tvm.nd.array(np.zeros(n, dtype=B.dtype), ctx)
2424
f(a, b)
25-
np.testing.assert_allclose(b.asnumpy(), a.asnumpy() + 1)
25+
tvm.testing.assert_allclose(b.asnumpy(), a.asnumpy() + 1)
2626
check_llvm()
2727

2828

docs/deploy/aocl_fpga.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -52,7 +52,7 @@ b = tvm.nd.array(np.random.uniform(size=n).astype("float32"), ctx)
5252
c = tvm.nd.array(np.zeros(n, dtype="float32"), ctx)
5353
5454
fadd(a, b, c)
55-
np.testing.assert_allclose(c.asnumpy(), a.asnumpy() + b.asnumpy())
55+
tvm.testing.assert_allclose(c.asnumpy(), a.asnumpy() + b.asnumpy())
5656
```
5757

5858
Setup

docs/deploy/aws_fpga.md

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -55,7 +55,7 @@ b = tvm.nd.array(np.random.uniform(size=n).astype("float32"), ctx)
5555
c = tvm.nd.array(np.zeros(n, dtype="float32"), ctx)
5656

5757
fadd(a, b, c)
58-
np.testing.assert_allclose(c.asnumpy(), a.asnumpy() + b.asnumpy())
58+
tvm.testing.assert_allclose(c.asnumpy(), a.asnumpy() + b.asnumpy())
5959
```
6060

6161
Setup

nnvm/python/nnvm/testing/check_computation.py

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -281,10 +281,10 @@ def check_function(symbol, forward=None, backward=None, grad_input_vars=None,
281281
Additional parameters for `check_numerical_grads`.
282282
283283
atol : float, optional
284-
Absolute tolerance for `np.testing.assert_allclose`. NOT used for numerical gradients.
284+
Absolute tolerance for `tvm.testing.assert_allclose`. NOT used for numerical gradients.
285285
286286
rtol : float, optional
287-
Relative tolerance for `np.testing.assert_allclose`. NOT used for numerical gradients.
287+
Relative tolerance for `tvm.testing.assert_allclose`. NOT used for numerical gradients.
288288
289289
quiet : bool, optional
290290
Don't dump additional information to stdout on failure.
@@ -466,7 +466,7 @@ def check_function(symbol, forward=None, backward=None, grad_input_vars=None,
466466
.format(len(numpy_res), out_len))
467467

468468
for i in range(out_len):
469-
np.testing.assert_allclose(nnvm_res[i], numpy_res[i], atol=atol, rtol=rtol)
469+
tvm.testing.assert_allclose(nnvm_res[i], numpy_res[i], atol=atol, rtol=rtol)
470470

471471
if backward is not None:
472472
nothing_was_done = False
@@ -495,8 +495,8 @@ def check_function(symbol, forward=None, backward=None, grad_input_vars=None,
495495
.format(set(grad_var_names) - set(numpy_grads)))
496496

497497
for x_name in numpy_grads:
498-
np.testing.assert_allclose(nnvm_grads[x_name], numpy_grads[x_name],
499-
atol=atol, rtol=rtol)
498+
tvm.testing.assert_allclose(nnvm_grads[x_name], numpy_grads[x_name],
499+
atol=atol, rtol=rtol)
500500

501501
if numerical_grads:
502502
nothing_was_done = False

nnvm/tests/python/compiler/test_build.py

Lines changed: 9 additions & 9 deletions
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ def verify(graph, lib):
2727
# get outputs
2828
out = tvm.nd.empty(shape, dtype)
2929
get_output(0, out)
30-
np.testing.assert_allclose(
30+
tvm.testing.assert_allclose(
3131
out.asnumpy(), np.exp(na.asnumpy() + nb.asnumpy()))
3232

3333
graph, lib, _ = nnvm.compiler.build(z, "llvm", shape_dict)
@@ -49,7 +49,7 @@ def test_run():
4949
nx = tvm.nd.array(np.random.uniform(size=shape).astype(dtype))
5050
ny = tvm.nd.array(np.random.uniform(size=shape).astype(dtype))
5151
res = _run_graph(z, {"x": nx, "y": ny})
52-
np.testing.assert_allclose(
52+
tvm.testing.assert_allclose(
5353
res[0].asnumpy(), np.exp(nx.asnumpy() + ny.asnumpy()))
5454

5555

@@ -73,7 +73,7 @@ def test_precompute_prune():
7373
m["load_params"](nnvm.compiler.save_param_dict(params))
7474
m.run()
7575
out = m.get_output(0, out=res)
76-
np.testing.assert_allclose(
76+
tvm.testing.assert_allclose(
7777
res.asnumpy(), nx.asnumpy() + 1 + ny.asnumpy() + na.asnumpy())
7878

7979

@@ -92,7 +92,7 @@ def test_dtypes():
9292
m.run(x=data)
9393
data = (data > 0) * data
9494
out = m.get_output(0, tvm.nd.empty(oshape, dtype))
95-
np.testing.assert_allclose(out.asnumpy(), data, atol=1e-5, rtol=1e-5)
95+
tvm.testing.assert_allclose(out.asnumpy(), data, atol=1e-5, rtol=1e-5)
9696

9797
def test_ndarray_output():
9898
x = sym.Variable("x")
@@ -110,7 +110,7 @@ def test_ndarray_output():
110110
m.set_input("y", ny)
111111
m.run()
112112
out = m.get_output(0)
113-
np.testing.assert_allclose(
113+
tvm.testing.assert_allclose(
114114
out.asnumpy(), nx.asnumpy() + ny.asnumpy())
115115

116116
def test_ndarray_input():
@@ -131,12 +131,12 @@ def test_ndarray_input():
131131
in_y = tvm.nd.empty(shape, dtype)
132132
m.get_input("x", in_x)
133133
m.get_input("y", in_y)
134-
np.testing.assert_allclose(nx.asnumpy(), in_x.asnumpy())
135-
np.testing.assert_allclose(ny.asnumpy(), in_y.asnumpy())
134+
tvm.testing.assert_allclose(nx.asnumpy(), in_x.asnumpy())
135+
tvm.testing.assert_allclose(ny.asnumpy(), in_y.asnumpy())
136136
in_nx = m.get_input("x")
137137
in_ny = m.get_input("y")
138-
np.testing.assert_allclose(nx.asnumpy(), in_nx.asnumpy())
139-
np.testing.assert_allclose(ny.asnumpy(), in_ny.asnumpy())
138+
tvm.testing.assert_allclose(nx.asnumpy(), in_nx.asnumpy())
139+
tvm.testing.assert_allclose(ny.asnumpy(), in_ny.asnumpy())
140140

141141
def test_num_outputs():
142142
x = sym.Variable('x')

nnvm/tests/python/compiler/test_compiler_cache.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -19,7 +19,7 @@ def verify(graph, lib):
1919
m.run(x=na, y=nb)
2020
# get outputs
2121
out = m.get_output(0, tvm.nd.empty(shape, dtype))
22-
np.testing.assert_allclose(
22+
tvm.testing.assert_allclose(
2323
out.asnumpy(), np.exp(na.asnumpy() + nb.asnumpy()))
2424

2525
engine = nnvm.compiler.engine

nnvm/tests/python/compiler/test_fold_axis.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,4 +1,5 @@
11
"""Unittest cases for fold_axis"""
2+
import tvm
23
import nnvm
34
import nnvm.testing.resnet
45
import numpy as np
@@ -147,7 +148,7 @@ def run_prune(graph, params, opt_level):
147148

148149
x = run_prune(graph, params, 0)
149150
y = run_prune(graph, params, 3)
150-
np.testing.assert_allclose(y[0].asnumpy(), x[0].asnumpy())
151+
tvm.testing.assert_allclose(y[0].asnumpy(), x[0].asnumpy())
151152

152153

153154
if __name__ == "__main__":

nnvm/tests/python/compiler/test_nhwc_layout.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -50,7 +50,7 @@ def test_nhwc():
5050
oshape_nhwc = (1, 224, 224, out_channel)
5151
nchw_output = build_and_run(nchw_sym, nchw_params, data, oshape)
5252
nhwc_output = build_and_run(nhwc_sym, nhwc_params, data.transpose(0, 2, 3, 1), oshape_nhwc)
53-
np.testing.assert_allclose(nchw_output, nhwc_output.transpose(0, 3, 1, 2), rtol=1e-5, atol=1e-5)
53+
tvm.testing.assert_allclose(nchw_output, nhwc_output.transpose(0, 3, 1, 2), rtol=1e-5, atol=1e-5)
5454

5555

5656
if __name__ == "__main__":

nnvm/tests/python/compiler/test_op_fusion.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -22,7 +22,7 @@ def test_ewise_injective():
2222
x_np = np.random.uniform(size=dshape).astype(dtype)
2323
m.run(x=x_np)
2424
out = m.get_output(0, tvm.nd.empty((10, 6)))
25-
np.testing.assert_allclose(
25+
tvm.testing.assert_allclose(
2626
out.asnumpy(), x_np.reshape(out.shape) * 2 + 1,
2727
atol=1e-5, rtol=1e-5)
2828

@@ -54,7 +54,7 @@ def test_conv_ewise_injective():
5454
data.asnumpy(), kernel.asnumpy(), (1,1), 'SAME')
5555
c_np = c_np + bias.asnumpy().reshape(kshape[0], 1, 1) + 1
5656
c_np = c_np.reshape(c_np.shape[0], np.prod(c_np.shape[1:])) + 1
57-
np.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
57+
tvm.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
5858

5959

6060
def test_injective_reduce_injective():
@@ -74,7 +74,7 @@ def test_injective_reduce_injective():
7474
c_np = np.sum(data.reshape(32, 18 * 18) + 1, axis=1)
7575
# get output
7676
out = m.get_output(0, tvm.nd.empty(c_np.shape, dtype))
77-
np.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
77+
tvm.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
7878

7979

8080
def test_injective_conv2d():
@@ -107,7 +107,7 @@ def test_injective_conv2d():
107107
data.asnumpy(), kernel.asnumpy(), (1,1), 'SAME')
108108
weight = np.mean(data.asnumpy(), axis=(2, 3))
109109
c_np = weight[:, :, np.newaxis, np.newaxis] * data.asnumpy() + residual
110-
np.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
110+
tvm.testing.assert_allclose(out.asnumpy(), c_np, rtol=1e-5)
111111

112112

113113
def test_concatenate_conv2d():
@@ -140,7 +140,7 @@ def test_concatenate_conv2d():
140140
conv = topi.testing.conv2d_nchw_python(
141141
concat, kernel.asnumpy(), (1,1), 'SAME')
142142
ref = concat + conv
143-
np.testing.assert_allclose(out.asnumpy(), ref, rtol=1e-5)
143+
tvm.testing.assert_allclose(out.asnumpy(), ref, rtol=1e-5)
144144

145145

146146
def test_residual_block_layout_transform():
@@ -178,7 +178,7 @@ def test_residual_block_layout_transform():
178178
conv2 = topi.testing.conv2d_nchw_python(
179179
conv1, kernel2.asnumpy(), (1,1), 'SAME')
180180
ref = np.maximum(conv1 + conv2, 0)
181-
np.testing.assert_allclose(out.asnumpy(), ref, rtol=1e-5)
181+
tvm.testing.assert_allclose(out.asnumpy(), ref, rtol=1e-5)
182182

183183

184184
def build_and_run(sym, params, data, out_shape, target, ctx, opt_level=2):
@@ -218,7 +218,7 @@ def get_sym(out_channel):
218218
_, params2 = utils.create_workload(sym2, 1, dshape[1:], seed=0)
219219
output1, g1 = build_and_run(sym1, params1, data, oshape, target, ctx, opt_level=2)
220220
output2, g2 = build_and_run(sym2, params2, data, oshape, target, ctx, opt_level=0)
221-
np.testing.assert_allclose(output1, output2, rtol=1e-5, atol=1e-5)
221+
tvm.testing.assert_allclose(output1, output2, rtol=1e-5, atol=1e-5)
222222
# data, conv weight, bias, batch norm gamma, batch norm beta, conv op
223223
assert g1.index.num_nodes == 6
224224

nnvm/tests/python/compiler/test_optimizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -27,7 +27,7 @@ def helper(symbol, inputs, params, update_func, run_times, target, ctx, dtype="f
2727
m.run()
2828
y_np = update_func(**np_inputs)
2929
out = m.get_output(0, tvm.nd.empty(y_np.shape, dtype))
30-
np.testing.assert_allclose(out.asnumpy(), y_np, atol=1e-5, rtol=1e-5)
30+
tvm.testing.assert_allclose(out.asnumpy(), y_np, atol=1e-5, rtol=1e-5)
3131

3232

3333
def test_sgd():

0 commit comments

Comments (0)