diff --git a/python/mxnet/__init__.py b/python/mxnet/__init__.py
index efdd02a3be6a..01bc07f23658 100644
--- a/python/mxnet/__init__.py
+++ b/python/mxnet/__init__.py
@@ -110,3 +110,5 @@
 from . import _api_internal
 from . import api
 from . import container
+
+npx.set_np()
diff --git a/tests/python/unittest/test_contrib_control_flow.py b/tests/python/unittest/test_contrib_control_flow.py
index 962ce6239115..2d0602378e03 100644
--- a/tests/python/unittest/test_contrib_control_flow.py
+++ b/tests/python/unittest/test_contrib_control_flow.py
@@ -27,6 +27,7 @@
 from mxnet.attribute import AttrScope
 from common import with_seed
 
+mx.reset_np()
 
 @with_seed()
 def test_while_loop_simple_forward():
diff --git a/tests/python/unittest/test_contrib_io.py b/tests/python/unittest/test_contrib_io.py
index dbae69fe7294..b7cb9b8c9c47 100644
--- a/tests/python/unittest/test_contrib_io.py
+++ b/tests/python/unittest/test_contrib_io.py
@@ -15,12 +15,15 @@
 # specific language governing permissions and limitations
 # under the License.
 
+import mxnet as mx
 import mxnet.ndarray as nd
 from mxnet.gluon.data.vision.datasets import *
 from mxnet.gluon.data.dataloader import *
 from mxnet.contrib.io import *
 from mxnet.test_utils import *
 
+mx.reset_np()
+
 def test_contrib_DataLoaderIter():
     def test_mnist_batches(batch_size, expected, last_batch='discard'):
         dataset = MNIST(train=False)
diff --git a/tests/python/unittest/test_contrib_stes_op.py b/tests/python/unittest/test_contrib_stes_op.py
index 26ab6f9491e4..729f9fc4c698 100644
--- a/tests/python/unittest/test_contrib_stes_op.py
+++ b/tests/python/unittest/test_contrib_stes_op.py
@@ -20,6 +20,7 @@
 from mxnet import nd, autograd, gluon
 from mxnet.test_utils import default_context
 
+mx.reset_np()
 
 class RoundSTENET(gluon.HybridBlock):
     def __init__(self, w_init, **kwargs):
diff --git a/tests/python/unittest/test_contrib_text.py b/tests/python/unittest/test_contrib_text.py
index 44d4422c7819..302dcde42aac 100644
--- a/tests/python/unittest/test_contrib_text.py
+++ b/tests/python/unittest/test_contrib_text.py
@@ -20,12 +20,14 @@
 from collections import Counter
 
 from common import assertRaises
+import mxnet as mx
 from mxnet import ndarray as nd
 from mxnet.test_utils import *
 from mxnet.contrib import text
 import pytest
 
 
+mx.reset_np()
 
 def _get_test_str_of_tokens(token_delim, seq_delim):
     seq1 = token_delim + token_delim.join(['Life', 'is', 'great', '!']) + token_delim + seq_delim
diff --git a/tests/python/unittest/test_exc_handling.py b/tests/python/unittest/test_exc_handling.py
index 72e21272a1c2..c58f65130d4d 100644
--- a/tests/python/unittest/test_exc_handling.py
+++ b/tests/python/unittest/test_exc_handling.py
@@ -25,6 +25,7 @@
 from mxnet.test_utils import assert_exception, default_context, set_default_context, use_np
 import pytest
 
+mx.reset_np()
 
 @with_seed()
 @pytest.mark.skipif(os.environ.get('MXNET_ENGINE_TYPE') == 'NaiveEngine',
diff --git a/tests/python/unittest/test_gluon.py b/tests/python/unittest/test_gluon.py
index 77d5119e7dff..b5be13db86ca 100644
--- a/tests/python/unittest/test_gluon.py
+++ b/tests/python/unittest/test_gluon.py
@@ -38,6 +38,8 @@
 import random
 import tempfile
 
+mx.reset_np()
+
 @with_seed()
 def test_parameter():
     p = gluon.Parameter('weight', shape=(10, 10))
@@ -192,11 +194,11 @@ def __init__(self, **kwargs):
 
     net = Net()
     lines = str(net.collect_params()).splitlines()
-    
+
     assert 'dense0.weight' in lines[0]
     assert '(10, 5)' in lines[0]
     assert 'float32' in lines[0]
-    
+
 
 @with_seed()
 def test_collect_parameters():
@@ -1735,7 +1737,7 @@ def mon_callback(node_name, opr_name, arr):
     model.add(mx.gluon.nn.AvgPool1D())
     model.initialize()
     model.hybridize()
-    check_name(model, [model[0].name + '_fwd_data', model[0].name + '_fwd_output'], 
+    check_name(model, [model[0].name + '_fwd_data', model[0].name + '_fwd_output'],
                expected_opr_names=["Pooling"], monitor_all=True)
 
     # stack two layers and test
diff --git a/tests/python/unittest/test_gluon_batch_processor.py b/tests/python/unittest/test_gluon_batch_processor.py
index bff80813bb12..f84ca6a6ef45 100644
--- a/tests/python/unittest/test_gluon_batch_processor.py
+++ b/tests/python/unittest/test_gluon_batch_processor.py
@@ -29,6 +29,8 @@
 from mxnet.gluon.contrib.estimator.batch_processor import BatchProcessor
 import pytest
 
+mx.reset_np()
+
 def _get_test_network():
     net = nn.Sequential()
     net.add(nn.Dense(4, activation='relu', flatten=False))
diff --git a/tests/python/unittest/test_gluon_contrib.py b/tests/python/unittest/test_gluon_contrib.py
index 33ea1e495e91..ff519f7d320b 100644
--- a/tests/python/unittest/test_gluon_contrib.py
+++ b/tests/python/unittest/test_gluon_contrib.py
@@ -28,6 +28,7 @@
 from common import setup_module, with_seed, teardown_module
 import numpy as np
 
+mx.reset_np()
 
 def check_rnn_cell(cell, in_shape=(10, 50), out_shape=(10, 100), begin_state=None):
     inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(3)]
diff --git a/tests/python/unittest/test_gluon_data_vision.py b/tests/python/unittest/test_gluon_data_vision.py
index 0546eb11c875..0456f38d079c 100644
--- a/tests/python/unittest/test_gluon_data_vision.py
+++ b/tests/python/unittest/test_gluon_data_vision.py
@@ -30,6 +30,8 @@
 import numpy as np
 
 
+mx.reset_np()
+
 @with_seed()
 def test_to_tensor():
     # 3D Input
diff --git a/tests/python/unittest/test_gluon_estimator.py b/tests/python/unittest/test_gluon_estimator.py
index a18dce054744..52daca432c5c 100644
--- a/tests/python/unittest/test_gluon_estimator.py
+++ b/tests/python/unittest/test_gluon_estimator.py
@@ -28,6 +28,7 @@
 from mxnet.gluon.contrib.estimator import *
 from mxnet.gluon.contrib.estimator.event_handler import *
 
+mx.reset_np()
 
 def _get_test_network(params=None):
     net = nn.Sequential()
diff --git a/tests/python/unittest/test_gluon_event_handler.py b/tests/python/unittest/test_gluon_event_handler.py
index 4cadc9466ed1..bb570719b1e6 100644
--- a/tests/python/unittest/test_gluon_event_handler.py
+++ b/tests/python/unittest/test_gluon_event_handler.py
@@ -34,6 +34,8 @@
 except ImportError:
     from io import StringIO
 
+mx.reset_np()
+
 class AxisArrayDataset(Dataset):
     def __init__(self, * args):
         self._length = len(args[1])
diff --git a/tests/python/unittest/test_gluon_model_zoo.py b/tests/python/unittest/test_gluon_model_zoo.py
index 191a070be287..b112816b7fba 100644
--- a/tests/python/unittest/test_gluon_model_zoo.py
+++ b/tests/python/unittest/test_gluon_model_zoo.py
@@ -27,6 +27,7 @@
 def eprint(*args, **kwargs):
     print(*args, file=sys.stderr, **kwargs)
 
+mx.reset_np()
 
 @with_seed()
 @pytest.mark.parametrize('model_name', [
diff --git a/tests/python/unittest/test_gluon_rnn.py b/tests/python/unittest/test_gluon_rnn.py
index 933c2c17d95f..8097b88016f8 100644
--- a/tests/python/unittest/test_gluon_rnn.py
+++ b/tests/python/unittest/test_gluon_rnn.py
@@ -26,6 +26,7 @@
 from mxnet.test_utils import almost_equal, assert_almost_equal
 from common import assert_raises_cudnn_not_satisfied, with_seed, retry
 
+mx.reset_np()
 
 def check_rnn_states(fused_states, stack_states, num_layers, bidirectional=False, is_lstm=True):
     directions = 2 if bidirectional else 1
@@ -212,11 +213,11 @@ def test_residual_bidirectional():
     inputs = [mx.sym.Variable('rnn_t%d_data'%i) for i in range(2)]
     outputs, _ = cell.unroll(2, inputs, merge_outputs=False)
     outputs = mx.sym.Group(outputs)
-    params = cell.collect_params() 
+    params = cell.collect_params()
     assert sorted(params.keys()) == \
-        ['base_cell.l_cell.h2h_bias', 'base_cell.l_cell.h2h_weight', 
+        ['base_cell.l_cell.h2h_bias', 'base_cell.l_cell.h2h_weight',
         'base_cell.l_cell.i2h_bias', 'base_cell.l_cell.i2h_weight',
-        'base_cell.r_cell.h2h_bias', 'base_cell.r_cell.h2h_weight', 
+        'base_cell.r_cell.h2h_bias', 'base_cell.r_cell.h2h_weight',
         'base_cell.r_cell.i2h_bias', 'base_cell.r_cell.i2h_weight']
     # assert outputs.list_outputs() == \
     #     ['bi_t0_plus_residual_output', 'bi_t1_plus_residual_output']
diff --git a/tests/python/unittest/test_gluon_trainer.py b/tests/python/unittest/test_gluon_trainer.py
index 8cf78042411e..91204dab2f2d 100644
--- a/tests/python/unittest/test_gluon_trainer.py
+++ b/tests/python/unittest/test_gluon_trainer.py
@@ -26,6 +26,8 @@
 from copy import deepcopy
 import pytest
 
+mx.reset_np()
+
 def dict_equ(a, b):
     assert set(a) == set(b)
     for k in a:
diff --git a/tests/python/unittest/test_image.py b/tests/python/unittest/test_image.py
index 9cb287a50b11..36259924309f 100644
--- a/tests/python/unittest/test_image.py
+++ b/tests/python/unittest/test_image.py
@@ -26,6 +26,7 @@
 import unittest
 import pytest
 
+mx.reset_np()
 
 def _get_data(url, dirname):
     import os, tarfile
diff --git a/tests/python/unittest/test_infer_shape.py b/tests/python/unittest/test_infer_shape.py
index d6a8a432da4a..0df991ab5fd3 100644
--- a/tests/python/unittest/test_infer_shape.py
+++ b/tests/python/unittest/test_infer_shape.py
@@ -20,6 +20,8 @@
 from common import models
 import pytest
 
+mx.reset_np()
+
 def test_mlp2_infer_shape():
     # Build MLP
     out = models.mlp2()
diff --git a/tests/python/unittest/test_loss.py b/tests/python/unittest/test_loss.py
index 9c1d496d715e..5f7dc0f363aa 100644
--- a/tests/python/unittest/test_loss.py
+++ b/tests/python/unittest/test_loss.py
@@ -22,6 +22,7 @@
 from common import setup_module, with_seed, teardown_module, xfail_when_nonstandard_decimal_separator
 import unittest
 
+mx.reset_np()
 
 @xfail_when_nonstandard_decimal_separator
 @with_seed()
diff --git a/tests/python/unittest/test_numpy_gluon.py b/tests/python/unittest/test_numpy_gluon.py
index a3adad68f985..a8262c604cd3 100644
--- a/tests/python/unittest/test_numpy_gluon.py
+++ b/tests/python/unittest/test_numpy_gluon.py
@@ -44,6 +44,8 @@ def check_block_params(x, TestBlock, hybridize, expected_type, initializer):
         for k, v in params.items():
             assert type(v.data()) is expected_type
 
+        mx.reset_np()
+
     class TestBlock1(gluon.HybridBlock):
         def __init__(self):
             super(TestBlock1, self).__init__()
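
Usage sketch (not part of the patch): the change above enables NumPy semantics by default at import time via npx.set_np(), while legacy-NDArray test modules opt back out with mx.reset_np(). The snippet below is a minimal illustration of that toggle, assuming an MXNet build that exposes mx.npx.set_np() / mx.npx.reset_np() and assuming mx.reset_np() forwards to the same switch.

# Minimal sketch of the semantics toggled by this patch (assumptions noted above).
import mxnet as mx

mx.npx.set_np()                  # what mxnet/__init__.py now does on import
assert mx.npx.is_np_shape()      # NumPy shape semantics (e.g. zero-size dims) enabled
x = mx.np.ones((2, 3))           # numpy-compatible front end returns mx.np.ndarray

mx.npx.reset_np()                # what the legacy test modules call at import time
assert not mx.npx.is_np_shape()  # back to classic semantics
y = mx.nd.ones((2, 3))           # legacy mx.nd front end behaves as before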