# Nightly 'MXNet: The Straight Dope' notebook tests.
#
# Shared driver for all four nightly straight-dope runs.  The public
# entry points below keep their original names so the nightly
# Jenkinsfiles continue to resolve them.
#
#   $1 - nosetests binary matching the kernel (e.g. nosetests-2.7)
#   $2 - Jupyter kernel name, exported via MXNET_TEST_KERNEL (e.g. python2)
#   $3 - 'single' or 'multi'; selects the test file and the xunit report name
run_straight_dope_tests() {
    set -ex
    local nose_bin=$1
    local kernel=$2
    local gpu_mode=$3
    cd /work/mxnet/tests/nightly/straight_dope
    export PYTHONPATH=/work/mxnet/python/
    export MXNET_TEST_KERNEL=${kernel}
    ${nose_bin} --with-xunit \
        --xunit-file "nosetests_straight_dope_${kernel}_${gpu_mode}_gpu.xml" \
        "test_notebooks_${gpu_mode}_gpu.py" --nologcapture
}

# Nightly 'MXNet: The Straight Dope' Single-GPU Tests
nightly_straight_dope_python2_single_gpu_tests() {
    run_straight_dope_tests nosetests-2.7 python2 single
}

nightly_straight_dope_python3_single_gpu_tests() {
    run_straight_dope_tests nosetests-3.4 python3 single
}

# Nightly 'MXNet: The Straight Dope' Multi-GPU Tests
nightly_straight_dope_python2_multi_gpu_tests() {
    run_straight_dope_tests nosetests-2.7 python2 multi
}

nightly_straight_dope_python3_multi_gpu_tests() {
    run_straight_dope_tests nosetests-3.4 python3 multi
}
docker_run('ubuntu_nightly_gpu', 'nightly_test_KVStore_singleNode', true) } } + }, + 'StraightDope: Python2 Single-GPU': { + node('mxnetlinux-gpu-p3') { + ws('workspace/straight_dope-single_gpu') { + init_git() + unpack_lib('gpu', mx_lib) + docker_run('ubuntu_nightly_gpu', 'nightly_straight_dope_python2_single_gpu_tests', true) + } + } + }, + 'StraightDope: Python2 Multi-GPU': { + node('mxnetlinux-gpu') { + ws('workspace/straight_dope-multi_gpu') { + init_git() + unpack_lib('gpu', mx_lib) + docker_run('ubuntu_nightly_gpu', 'nightly_straight_dope_python2_multi_gpu_tests', true) + } + } + }, + 'StraightDope: Python3 Single-GPU': { + node('mxnetlinux-gpu-p3') { + ws('workspace/straight_dope-single_gpu') { + init_git() + unpack_lib('gpu', mx_lib) + docker_run('ubuntu_nightly_gpu', 'nightly_straight_dope_python3_single_gpu_tests', true) + } + } + }, + 'StraightDope: Python3 Multi-GPU': { + node('mxnetlinux-gpu') { + ws('workspace/straight_dope-multi_gpu') { + init_git() + unpack_lib('gpu', mx_lib) + docker_run('ubuntu_nightly_gpu', 'nightly_straight_dope_python3_multi_gpu_tests', true) + } + } } } } catch (caughtError) { diff --git a/tests/nightly/straight_dope/README.md b/tests/nightly/straight_dope/README.md new file mode 100755 index 000000000000..65a615b58d7e --- /dev/null +++ b/tests/nightly/straight_dope/README.md @@ -0,0 +1,7 @@ +# Nightly Tests for MXNet: The Straight Dope + +These are some longer running tests that are scheduled to run every night. + +### Description +These tests verify that the straight dope tutorials run without error. They are +run on both single and multi-gpu configurations. 
diff --git a/tests/nightly/straight_dope/straight_dope_test_utils.py b/tests/nightly/straight_dope/straight_dope_test_utils.py new file mode 100644 index 000000000000..bb64f37fe5f5 --- /dev/null +++ b/tests/nightly/straight_dope/straight_dope_test_utils.py @@ -0,0 +1,130 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +#pylint: disable=no-member, too-many-locals, too-many-branches, no-self-use, broad-except, lost-exception, too-many-nested-blocks, too-few-public-methods, invalid-name +""" + This file tests and ensures that all straight dope notebooks run + without warning or exception. + + env variable MXNET_TEST_KERNEL controls which kernel to use when running + the notebook. e.g: `export MXNET_TEST_KERNEL=python2` +""" +import io +import os +import re +import shutil +import subprocess +import sys +from time import sleep + +#TODO(vishaalk): Find a cleaner way to import this notebook. 
+sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', 'utils')) +from notebook_test import run_notebook + +EPOCHS_REGEX = r'epochs\s+=\s+[0-9]+' # Regular expression that matches 'epochs = #' +GIT_PATH = '/usr/bin/git' +GIT_REPO = 'https://github.com/zackchase/mxnet-the-straight-dope' +KERNEL = os.getenv('MXNET_TEST_KERNEL', None) +NOTEBOOKS_DIR = os.path.join(os.path.dirname(__file__), 'tmp_notebook') + +def _test_notebook(notebook, override_epochs=True): + """Run Jupyter notebook to catch any execution error. + + Args: + notebook : string + notebook name in folder/notebook format + epochs : boolean + whether or not to override the number of epochs to 1 + Returns: + True if the notebook runs without warning or error. + """ + if override_epochs: + _override_epochs(notebook) + return run_notebook(notebook, NOTEBOOKS_DIR, kernel=KERNEL, temp_dir=NOTEBOOKS_DIR) + + +def _override_epochs(notebook): + """Overrides the number of epochs in the notebook to 1 epoch. Note this operation is idempotent. + + Args: + notebook : string + notebook name in folder/notebook format + + """ + notebook_path = os.path.join(*([NOTEBOOKS_DIR] + notebook.split('/'))) + ".ipynb" + + # Read the notebook and set epochs to num_epochs + with io.open(notebook_path, 'r', encoding='utf-8') as f: + notebook = f.read() + + # Set number of epochs to 1 + modified_notebook = re.sub(EPOCHS_REGEX, 'epochs = 1', notebook) + + # Replace the original notebook with the modified one. + with io.open(notebook_path, 'w', encoding='utf-8') as f: + f.write(modified_notebook) + + +def _download_straight_dope_notebooks(): + """Downloads the Straight Dope Notebooks. + + Returns: + True if it succeeds in downloading the notebooks without error. 
+ """ + print('Cleaning and setting up notebooks directory "{}"'.format(NOTEBOOKS_DIR)) + shutil.rmtree(NOTEBOOKS_DIR, ignore_errors=True) + + cmd = [GIT_PATH, + 'clone', + GIT_REPO, + NOTEBOOKS_DIR] + + proc, msg = _run_command(cmd) + + if proc.returncode != 0: + err_msg = 'Error downloading Straight Dope notebooks.\n' + err_msg += msg + print(err_msg) + return False + return True + +def _run_command(cmd, timeout_secs=300): + """ Runs a command with a specified timeout. + + Args: + cmd : list of string + The command with arguments to run. + timeout_secs: integer + The timeout in seconds + + Returns: + Returns the process and the output as a pair. + """ + proc = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.STDOUT) + + for i in range(timeout_secs): + sleep(1) + if proc.poll() is not None: + (out, _) = proc.communicate() + return proc, out.decode('utf-8') + + proc.kill() + return proc, "Timeout of %s secs exceeded." % timeout_secs + diff --git a/tests/nightly/straight_dope/test_notebooks_multi_gpu.py b/tests/nightly/straight_dope/test_notebooks_multi_gpu.py new file mode 100644 index 000000000000..2038ada3a8b2 --- /dev/null +++ b/tests/nightly/straight_dope/test_notebooks_multi_gpu.py @@ -0,0 +1,49 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. 
#pylint: disable=no-member, too-many-locals, too-many-branches, no-self-use, broad-except, lost-exception, too-many-nested-blocks, too-few-public-methods, invalid-name, missing-docstring
"""
    This file tests that the notebooks requiring multi GPUs run without
    warning or exception.
"""
import unittest
from straight_dope_test_utils import _test_notebook
from straight_dope_test_utils import _download_straight_dope_notebooks

class StraightDopeMultiGpuTests(unittest.TestCase):
    """Executes each multi-GPU straight-dope notebook; fails on any warning or error."""

    @classmethod
    def setUpClass(cls):
        # classmethods receive the class object, conventionally named 'cls'
        # (the original 'self' was misleading).  Clone the notebooks once
        # for the whole class; abort the suite early if the download fails.
        assert _download_straight_dope_notebooks()

    # Chapter 7

    # TODO(vishaalk): module 'mxnet.gluon' has no attribute 'autograd'
    #def test_multiple_gpus_scratch(self):
    #    assert _test_notebook('chapter07_distributed-learning/multiple-gpus-scratch')

    def test_multiple_gpus_gluon(self):
        assert _test_notebook('chapter07_distributed-learning/multiple-gpus-gluon')

    # TODO(vishaalk): Do a dry run, and then enable.
    #def test_training_with_multiple_machines(self):
    #    assert _test_notebook('chapter07_distributed-learning/training-with-multiple-machines')

    # Chapter 8

    # TODO(vishaalk): Module skimage needs to be added to docker image.
    # def test_fine_tuning(self):
    #     assert _test_notebook('chapter08_computer-vision/fine-tuning')
#pylint: disable=no-member, too-many-locals, too-many-branches, no-self-use, broad-except, lost-exception, too-many-nested-blocks, too-few-public-methods, invalid-name, missing-docstring
"""
    This file tests that the notebooks requiring a single GPU run without
    warning or exception.
"""
import glob
import re
import os
import unittest
from straight_dope_test_utils import _test_notebook
from straight_dope_test_utils import _download_straight_dope_notebooks
from straight_dope_test_utils import NOTEBOOKS_DIR

# Notebooks deliberately excluded from this suite (prose-only chapters, or
# notebooks covered by the multi-GPU/multi-machine suite instead).
NOTEBOOKS_WHITELIST = [
    'chapter01_crashcourse/preface',
    'chapter01_crashcourse/introduction',
    'chapter01_crashcourse/chapter-one-problem-set',
    'chapter02_supervised-learning/environment',
    'chapter07_distributed-learning/multiple-gpus-scratch',
    'chapter07_distributed-learning/multiple-gpus-gluon',
    'chapter07_distributed-learning/training-with-multiple-machines'
]


class StraightDopeSingleGpuTests(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        # classmethods receive the class object, conventionally named 'cls'.
        # Clone the notebooks once for the whole test class.
        assert _download_straight_dope_notebooks()


    def test_completeness(self):
        """
        Make sure that every tutorial that isn't in the whitelist is considered for testing by this
        file. Exceptions should be added to the whitelist.
        N.B. If the test is commented out, then that will be viewed as an intentional disabling of the
        test (the regex below still matches commented-out assert lines).
        """
        # Open up this test file.
        with open(__file__, 'r') as f:
            # read() (not '\n'.join(readlines())) -- readlines() keeps the
            # trailing newlines, so joining them doubled every line break.
            notebook_test_text = f.read()

        # Scan the directory the notebooks were actually cloned into.
        # (The original looked in 'straight_dope_book', which is never
        # created by this suite, so the glob was always empty and the
        # check vacuous.)
        notebooks = glob.glob(os.path.join(NOTEBOOKS_DIR, '**', '*.ipynb'))

        # Compile a list of notebooks that are tested
        tested_notebooks = set(re.findall(r"assert _test_notebook\('(.*)'\)", notebook_test_text))

        # Ensure each notebook in the straight dope book directory is on the whitelist or is tested.
        for notebook in notebooks:
            friendly_name = '/'.join(notebook.split('/')[-2:]).split('.')[0]
            if friendly_name not in tested_notebooks and friendly_name not in NOTEBOOKS_WHITELIST:
                assert False, friendly_name + " has not been added to the nightly/tests/straight_" + \
                              "dope/test_notebooks_single_gpu.py test_suite. Consider also adding " + \
                              "it to nightly/tests/straight_dope/test_notebooks_multi_gpu.py as " + \
                              "well if the notebooks makes use of multiple GPUs."

    def test_ndarray(self):
        assert _test_notebook('chapter01_crashcourse/ndarray')

    def test_linear_algebra(self):
        assert _test_notebook('chapter01_crashcourse/linear-algebra')

    def test_probability(self):
        assert _test_notebook('chapter01_crashcourse/probability')

    # TODO(vishaalk): Notebook contains the word 'Warning'. Needs to be updated to a synonym.
    #def test_autograd(self):
    #    assert _test_notebook('chapter01_crashcourse/autograd')

    # Chapter 2

    def test_linear_regression_scratch(self):
        assert _test_notebook('chapter02_supervised-learning/linear-regression-scratch')

    def test_linear_regression_gluon(self):
        assert _test_notebook('chapter02_supervised-learning/linear-regression-gluon')

    # TODO(vishaalk): There is a relative file path needs to be fixed so that the
    # python code can be run from another directory.
    #def test_logistic_regression_gluon(self):
    #    assert _test_notebook('chapter02_supervised-learning/logistic-regression-gluon')

    def test_softmax_regression_scratch(self):
        assert _test_notebook('chapter02_supervised-learning/softmax-regression-scratch')

    def test_softmax_regression_gluon(self):
        assert _test_notebook('chapter02_supervised-learning/softmax-regression-gluon')

    def test_regularization_scratch(self):
        assert _test_notebook('chapter02_supervised-learning/regularization-scratch')

    # TODO(vishaalk): Notebook does not appear to be JSON: '{\n "cells": [\n {\n "cell_type": "m....
    #def test_regularization_gluon(self):
    #    assert _test_notebook('chapter02_supervised-learning/regularization-gluon')

    def test_perceptron(self):
        assert _test_notebook('chapter02_supervised-learning/perceptron')

    # Chapter 3

    def test_mlp_scratch(self):
        assert _test_notebook('chapter03_deep-neural-networks/mlp-scratch')

    def test_mlp_gluon(self):
        assert _test_notebook('chapter03_deep-neural-networks/mlp-gluon')

    def test_mlp_dropout_scratch(self):
        assert _test_notebook('chapter03_deep-neural-networks/mlp-dropout-scratch')

    def test_mlp_dropout_gluon(self):
        assert _test_notebook('chapter03_deep-neural-networks/mlp-dropout-gluon')

    def test_plumbing(self):
        assert _test_notebook('chapter03_deep-neural-networks/plumbing')

    def test_custom_layer(self):
        assert _test_notebook('chapter03_deep-neural-networks/custom-layer')

    #def test_kaggle_gluon_kfold(self):
    #    assert _test_notebook('chapter03_deep-neural-networks/kaggle-gluon-kfold')

    # TODO(vishaalk): Load params and Save params are deprecated warning.
    #def test_serialization(self):
    #    assert _test_notebook('chapter03_deep-neural-networks/serialization')

    # Chapter 4

    def test_cnn_scratch(self):
        assert _test_notebook('chapter04_convolutional-neural-networks/cnn-scratch')

    def test_cnn_gluon(self):
        assert _test_notebook('chapter04_convolutional-neural-networks/cnn-gluon')

    # TODO(vishaalk): Load params and Save params are deprecated warning.
    #def test_deep_cnns_alexnet(self):
    #    assert _test_notebook('chapter04_convolutional-neural-networks/deep-cnns-alexnet')

    def test_very_deep_nets_vgg(self):
        assert _test_notebook('chapter04_convolutional-neural-networks/very-deep-nets-vgg')

    def test_cnn_batch_norm_scratch(self):
        assert _test_notebook('chapter04_convolutional-neural-networks/cnn-batch-norm-scratch')

    def test_cnn_batch_norm_gluon(self):
        assert _test_notebook('chapter04_convolutional-neural-networks/cnn-batch-norm-gluon')

    # Chapter 5

    # TODO(vishaalk): There is a relative file path needs to be fixed so that the
    # python code can be run from another directory.
    #def test_simple_rnn(self):
    #    assert _test_notebook('chapter05_recurrent-neural-networks/simple-rnn')

    # TODO(vishaalk): There is a relative file path needs to be fixed so that the
    # python code can be run from another directory.
    #def test_lstm_scratch(self):
    #    assert _test_notebook('chapter05_recurrent-neural-networks/lstm-scratch')

    # TODO(vishaalk): There is a relative file path needs to be fixed so that the
    # python code can be run from another directory.
    #def test_gru_scratch(self):
    #    assert _test_notebook('chapter05_recurrent-neural-networks/gru-scratch')

    #def test_rnns_gluon(self):
    #    assert _test_notebook('chapter05_recurrent-neural-networks/rnns-gluon')

    # Chapter 6

    def test_optimization_intro(self):
        assert _test_notebook('chapter06_optimization/optimization-intro')

    # TODO(vishaalk): RuntimeWarning: Overflow encountered in reduce.
    #def test_gd_sgd_scratch(self):
    #    assert _test_notebook('chapter06_optimization/gd-sgd-scratch')

    # TODO(vishaalk): RuntimeWarning: Overflow encountered in reduce.
    #def test_gd_sgd_gluon(self):
    #    assert _test_notebook('chapter06_optimization/gd-sgd-gluon')

    def test_momentum_scratch(self):
        assert _test_notebook('chapter06_optimization/momentum-scratch')

    def test_momentum_gluon(self):
        assert _test_notebook('chapter06_optimization/momentum-gluon')

    def test_adagrad_scratch(self):
        assert _test_notebook('chapter06_optimization/adagrad-scratch')

    def test_adagrad_gluon(self):
        assert _test_notebook('chapter06_optimization/adagrad-gluon')

    def test_rmsprop_scratch(self):
        assert _test_notebook('chapter06_optimization/rmsprop-scratch')

    def test_rmsprop_gluon(self):
        assert _test_notebook('chapter06_optimization/rmsprop-gluon')

    def test_adadelta_scratch(self):
        assert _test_notebook('chapter06_optimization/adadelta-scratch')

    def test_adadelta_gluon(self):
        assert _test_notebook('chapter06_optimization/adadelta-gluon')

    def test_adam_scratch(self):
        assert _test_notebook('chapter06_optimization/adam-scratch')

    def test_adam_gluon(self):
        assert _test_notebook('chapter06_optimization/adam-gluon')

    # Chapter 7

    def test_hybridize(self):
        assert _test_notebook('chapter07_distributed-learning/hybridize')


    # Chapter 8

    # TODO(vishaalk): Load params and Save params are deprecated warning.
    #def test_object_detection(self):
    #    assert _test_notebook('chapter08_computer-vision/object-detection')

    # TODO(vishaalk): Module skimage needs to be added to docker image.
    #def test_fine_tuning(self):
    #    assert _test_notebook('chapter08_computer-vision/fine-tuning')

    # TODO(vishaalk):
    #def test_visual_question_answer(self):
    #    assert _test_notebook('chapter08_computer-vision/visual-question-answer')

    # Chapter 9

    def test_tree_lstm(self):
        assert _test_notebook('chapter09_natural-language-processing/tree-lstm')

    # Chapter 11

    # TODO(vishaalk): Deferred initialization failed because shape cannot be inferred.
    #def test_intro_recommender_systems(self):
    #    assert _test_notebook('chapter11_recommender-systems/intro-recommender-systems')

    # Chapter 12

    def test_lds_scratch(self):
        assert _test_notebook('chapter12_time-series/lds-scratch')

    # TODO(vishaalk): File doesn't appear to be valid JSON.
    #def test_issm_scratch(self):
    #    assert _test_notebook('chapter12_time-series/issm-scratch')

    # TODO(vishaalk): Error: sequential1_batchnorm0_running_mean' has not been initialized
    # def test_intro_forecasting_gluon(self):
    #     assert _test_notebook('chapter12_time-series/intro-forecasting-gluon')

    #def test_intro_forecasting_2_gluon(self):
    #    assert _test_notebook('chapter12_time-series/intro-forecasting-2-gluon')

    # Chapter 13

    # TODO(vishaalk): Load params and Save params are deprecated warning.
    #def test_vae_gluon(self):
    #    assert _test_notebook('chapter13_unsupervised-learning/vae-gluon')

    # Chapter 14

    def test_igan_intro(self):
        assert _test_notebook('chapter14_generative-adversarial-networks/gan-intro')

    def test_dcgan(self):
        assert _test_notebook('chapter14_generative-adversarial-networks/dcgan')

    def test_generative_adversarial_networks(self):
        assert _test_notebook('chapter14_generative-adversarial-networks/conditional')

    # Chapter 16

    # TODO(vishaalk): Checked failed oshape.Size() != dshape.Size()
    #def test_tensor_basics(self):
    #    assert _test_notebook('chapter16_tensor_methods/tensor_basics')

    # TODO(vishaalk): Notebook does not appear to be valid JSON.
    #def test_pixel2pixel(self):
    #    assert _test_notebook('chapter14_generative-adversarial-networks/pixel2pixel')

    # Chapter 17

    # TODO(vishaalk): Requires OpenAI Gym. Also uses deprecated load_params.
    #def test_dqn(self):
    #    assert _test_notebook('chapter17_deep-reinforcement-learning/DQN')

    #def test_ddqn(self):
    #    assert _test_notebook('chapter17_deep-reinforcement-learning/DDQN')

    # Chapter 18

    #def test_bayes_by_backprop(self):
    #    assert _test_notebook('chapter18_variational-methods-and-uncertainty/bayes-by-backprop')

    #def test_bayes_by_backprop_gluon(self):
    #    assert _test_notebook('chapter18_variational-methods-and-uncertainty/bayes-by-backprop-gluon')

    # TODO(vishaalk): AttributeError: 'list' object has no attribute 'keys'
    #def test_bayes_by_backprop_rnn(self):
    #    assert _test_notebook('chapter18_variational-methods-and-uncertainty/bayes-by-backprop-rnn')

    # Chapter 19

    # TODO(vishaalk): Requires deepchem
    #def test_graph_neural_networks(self):
    #    assert _test_notebook('chapter19_graph-neural-networks/Graph-Neural-Networks')

    # Cheatsheets

    # TODO(vishaalk): There is a relative file path needs to be fixed so that the
    # python code can be run from another directory.
    #def test_kaggle_gluon_kfold(self):
    #    assert _test_notebook('cheatsheets/kaggle-gluon-kfold')
""" import os -import warnings -import imp -import shutil -import time -import argparse -import traceback -import nbformat -from nbconvert.preprocessors import ExecutePreprocessor import sys +sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', 'utils')) +from notebook_test import run_notebook -# Maximum 10 minutes per test -# Reaching timeout causes test failure -TIME_OUT = 10*60 -# Pin to ipython version 4 -IPYTHON_VERSION = 4 -temp_dir = 'tmp_notebook' + +TUTORIAL_DIR = os.path.join(os.path.dirname(__file__), '..', '..', 'docs', '_build', 'html', 'tutorials') +KERNEL = os.getenv('MXNET_TUTORIAL_TEST_KERNEL', None) +NO_CACHE = os.getenv('MXNET_TUTORIAL_TEST_NO_CACHE', False) def _test_tutorial_nb(tutorial): - """Run tutorial jupyter notebook to catch any execution error. + """Run tutorial Jupyter notebook to catch any execution error. Parameters ---------- tutorial : str - tutorial name in folder/tutorial format - """ - - tutorial_dir = os.path.join(os.path.dirname(__file__), '..', '..', 'docs', '_build', 'html', 'tutorials') - tutorial_path = os.path.join(*([tutorial_dir] + tutorial.split('/'))) - - # see env variable docs in the doc string of the file - kernel = os.getenv('MXNET_TUTORIAL_TEST_KERNEL', None) - no_cache = os.getenv('MXNET_TUTORIAL_TEST_NO_CACHE', False) - - working_dir = os.path.join(*([temp_dir] + tutorial.split('/'))) - - if no_cache == '1': - print("Cleaning and setting up temp directory '{}'".format(working_dir)) - shutil.rmtree(temp_dir, ignore_errors=True) - - errors = [] - notebook = None - if not os.path.isdir(working_dir): - os.makedirs(working_dir) - try: - notebook = nbformat.read(tutorial_path + '.ipynb', as_version=IPYTHON_VERSION) - # Adding a small delay to allow time for sockets to be freed - # stop-gap measure to battle the 1000ms linger of socket hard coded - # in the kernel API code - time.sleep(1.1) - if kernel is not None: - eprocessor = ExecutePreprocessor(timeout=TIME_OUT, kernel_name=kernel) - else: - 
eprocessor = ExecutePreprocessor(timeout=TIME_OUT) - nb, _ = eprocessor.preprocess(notebook, {'metadata': {'path': working_dir}}) - except Exception as err: - err_msg = str(err) - errors.append(err_msg) - finally: - if notebook is not None: - output_file = os.path.join(working_dir, "output.txt") - nbformat.write(notebook, output_file) - output_nb = open(output_file, mode='r') - for line in output_nb: - if "Warning:" in line: - errors.append("Warning:\n"+line) - if len(errors) > 0: - print('\n'.join(errors)) - return False - return True + the name of the tutorial to be tested + Returns + ------- + True if there are no warnings or errors. + """ + return run_notebook(tutorial, TUTORIAL_DIR, kernel=KERNEL, no_cache=NO_CACHE) def test_basic_ndarray(): @@ -181,7 +133,7 @@ def test_onnx_inference_on_onnx_model(): def test_python_matrix_factorization(): assert _test_tutorial_nb('python/matrix_factorization') -def test_python_linear_regression() : +def test_python_linear_regression(): assert _test_tutorial_nb('python/linear-regression') def test_python_mnist(): @@ -204,7 +156,7 @@ def test_python_types_of_data_augmentation(): def test_python_profiler(): assert _test_tutorial_nb('python/profiler') - + def test_sparse_row_sparse(): assert _test_tutorial_nb('sparse/row_sparse') @@ -224,4 +176,4 @@ def test_vision_large_scale_classification(): assert _test_tutorial_nb('vision/large_scale_classification') def test_vision_cnn_visualization(): - assert _test_tutorial_nb('vision/cnn_visualization') \ No newline at end of file + assert _test_tutorial_nb('vision/cnn_visualization') diff --git a/tests/utils/notebook_test/__init__.py b/tests/utils/notebook_test/__init__.py new file mode 100644 index 000000000000..cb5282fb4043 --- /dev/null +++ b/tests/utils/notebook_test/__init__.py @@ -0,0 +1,97 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. 
#pylint: disable=no-member, too-many-locals, too-many-branches, no-self-use, broad-except, lost-exception, too-many-nested-blocks, too-few-public-methods, invalid-name
"""
    This file provides functionality to test that notebooks run without
    warning or exception.
"""
import io
import os
import shutil
import time

from nbconvert.preprocessors import ExecutePreprocessor
import nbformat


IPYTHON_VERSION = 4  # Pin to ipython version 4.
TIME_OUT = 10*60     # Maximum 10 mins/test. Reaching timeout causes test failure.

def run_notebook(notebook, notebook_dir, kernel=None, no_cache=False, temp_dir='tmp_notebook'):
    """Run a Jupyter notebook to catch any execution error.

    Parameters
    ----------
    notebook : string
        the name of the notebook to be tested, in folder/notebook form
    notebook_dir : string
        the directory of the notebook to be tested
    kernel : string, None
        controls which kernel to use when running the notebook. e.g: python2
    no_cache : '1' or False
        controls whether to clean the temporary directory in which the
        notebook was run and re-download any resource file. The default
        behavior is to not clean the directory. Set to '1' to force clean the
        directory.
        NB: in the real CI, the tests will re-download everything since they
        start from a clean workspace.
    temp_dir: string
        The temporary sub-directory in which to run the notebook.

    Returns
    -------
    Returns true if the workbook runs with no warning or exception.
    """
    notebook_path = os.path.join(*([notebook_dir] + notebook.split('/')))
    working_dir = os.path.join(*([temp_dir] + notebook.split('/')))

    if no_cache == '1':
        print("Cleaning and setting up temp directory '{}'".format(working_dir))
        shutil.rmtree(temp_dir, ignore_errors=True)

    errors = []
    # 'nb' holds the notebook object; a separate name so the 'notebook'
    # parameter (the name string) is not shadowed.
    nb = None
    if not os.path.isdir(working_dir):
        os.makedirs(working_dir)
    try:
        nb = nbformat.read(notebook_path + '.ipynb', as_version=IPYTHON_VERSION)
        # Adding a small delay to allow time for sockets to be freed
        # stop-gap measure to battle the 1000ms linger of socket hard coded
        # in the kernel API code
        time.sleep(1.1)
        if kernel is not None:
            eprocessor = ExecutePreprocessor(timeout=TIME_OUT, kernel_name=kernel)
        else:
            eprocessor = ExecutePreprocessor(timeout=TIME_OUT)
        # Re-bind 'nb' to the executed notebook so the version written out
        # below contains the cell outputs (the original wrote the
        # pre-execution object and left the executed result unused).
        nb, _ = eprocessor.preprocess(nb, {'metadata': {'path': working_dir}})
    except Exception as err:
        errors.append(str(err))
    finally:
        # Returns inside 'finally' deliberately swallow in-flight exceptions
        # (see the 'lost-exception' pylint disable above): any failure mode
        # should surface as a False return, not a raised error.
        if nb is not None:
            output_file = os.path.join(working_dir, "output.txt")
            nbformat.write(nb, output_file)
            # NOTE(review): this scans the serialized notebook, so a cell
            # whose *source text* merely contains 'Warning:' is also flagged
            # -- confirm this is intended before relying on it.
            with io.open(output_file, mode='r', encoding='utf-8') as output_nb:
                for line in output_nb:
                    if "Warning:" in line:
                        errors.append("Warning:\n" + line)
        if errors:
            print('\n'.join(errors))
            return False
        return True