diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 00000000..14112662
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,7 @@
+[run]
+plugins =
+ django_coverage_plugin
+omit =
+ venv/*
+ static/*
+ */migrations/*
diff --git a/.flake8 b/.flake8
index 8d74efc7..1495397e 100644
--- a/.flake8
+++ b/.flake8
@@ -1,3 +1,3 @@
[flake8]
max-line-length = 79
-exclude = .git,__pycache__,build,dist,.tox,*/migrations/*,env,__init__.py,settings.py,*/node_modules/*,routing.py
+exclude = .git,__pycache__,build,dist,.tox,*/migrations/*,env,venv,__init__.py,settings.py,*/node_modules/*,routing.py
diff --git a/.gitignore b/.gitignore
index 8cca934a..035e3f04 100644
--- a/.gitignore
+++ b/.gitignore
@@ -39,9 +39,8 @@ MANIFEST
pip-log.txt
pip-delete-this-directory.txt
-# Unit test / coverage reports
+# Unit tests / coverage reports
htmlcov/
-.tox/
.coverage
.coverage.*
.cache
diff --git a/.travis.yml b/.travis.yml
index 7a3e94b4..0cf3ee88 100644
--- a/.travis.yml
+++ b/.travis.yml
@@ -5,7 +5,7 @@ node_js:
install:
- npm install jest@24.8.0 puppeteer@1.17.0 jest-puppeteer@4.3.0 @babel/preset-env@7.4.5 url-join@4.0.0 @babel/core@7.4.5
script:
- - echo -e "Trigger python unit testing in here when they will be ready" || true
+ - echo -e "python unit/integration testing running in geppetto-scidash travis" || true
after_success:
- echo $TRAVIS_BRANCH
- echo $TRAVIS_PULL_REQUEST
diff --git a/Makefile b/Makefile
index c5d7f6de..44cda15c 100644
--- a/Makefile
+++ b/Makefile
@@ -1,3 +1,10 @@
+
+VENV:=$(shell if [ -d "venv" ]; then echo "venv/bin/"; else echo ""; fi)
+PYTHON:=$(VENV)python3
+PIP:=$(VENV)pip
+MANAGE:=manage.py
+MANAGECMD=$(PYTHON) $(MANAGE)
+
install: create-db install-sciunit-neuronunit install-frontend install-backend
@echo "==========================="
@echo "= Finished ="
@@ -63,19 +70,19 @@ run-staging: migrate
django-migrate: migrations migrate
migrations:
- ./manage.py makemigrations
+ $(MANAGECMD) makemigrations
migrate:
- ./manage.py migrate
+ $(MANAGECMD) migrate
superuser:
- ./manage.py createsuperuser
+ $(MANAGECMD) createsuperuser
run-django:
- ./manage.py runserver
+ $(MANAGECMD) runserver
run-django-staging:
- python3.6 manage.py runserver --insecure 0.0.0.0:8000
+ $(MANAGECMD) runserver --insecure 0.0.0.0:8000
run-frontend:
cd static/org.geppetto.frontend/src/main/webapp/; npm run build-dev-noTest:watch;
@@ -89,6 +96,18 @@ run-celery-beat:
run-virgo-staging:
/bin/bash /opt/virgo/bin/startup.sh
+manage/%:
+ $(MANAGECMD) $*
+
+run-tests: run-django-tests
+
+run-django-tests:
+ $(MANAGECMD) test
+
+coverage:
+ coverage run $(MANAGE) test
+ coverage report -m
+
lint: flake8-lint isort-lint yapf-lint
format: yapf-format isort-format
diff --git a/README.md b/README.md
index bffc5230..de034475 100644
--- a/README.md
+++ b/README.md
@@ -1,3 +1,5 @@
+[](https://travis-ci.org/MetaCell/scidash)
+[](https://travis-ci.org/MetaCell/scidash)
diff --git a/requirements-dev.txt b/requirements-dev.txt
index 1e835830..deb0384c 100644
--- a/requirements-dev.txt
+++ b/requirements-dev.txt
@@ -1,3 +1,6 @@
flake8
yapf
isort
+coverage
+django_coverage_plugin
+pip-tools
diff --git a/requirements.in b/requirements.in
index 5c1d6477..be31ba8a 100644
--- a/requirements.in
+++ b/requirements.in
@@ -1,5 +1,5 @@
-psycopg2
django==1.11.23
+psycopg2==2.7.7
channels==2.1.2
djangorestframework==3.7.1
drf-writable-nested
@@ -14,3 +14,4 @@ django-celery-beat
django-celery-results
django-db-logger
git+git://github.com/ddelpiano/neuronunit@4.0.0#egg=neuronunit
+wheel==0.33.6
diff --git a/requirements.txt b/requirements.txt
index b36df93f..2221a974 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,45 +2,24 @@
# This file is autogenerated by pip-compile
# To update, run:
#
-# pip-compile
+# pip-compile requirements.in
#
-airspeed==0.5.4dev-20150515
-allensdk==0.14.5
amqp==2.4.2 # via kombu
-apipkg==1.5 # via execnet
-argparse==1.4.0 # via scoop
asgiref==2.3.2 # via channels
-asteval==0.9.18 # via lmfit
async-timeout==3.0.1 # via asgiref
-attrs==19.1.0 # via automat, jsonschema, twisted
+attrs==19.1.0 # via automat, twisted
autobahn==19.3.3 # via daphne
automat==0.7.0 # via twisted
-backcall==0.1.0 # via ipython
-backports.tempfile==1.0
-backports.weakref==1.0.post1 # via backports.tempfile
-beautifulsoup4==4.8.2 # via bs4
billiard==3.5.0.5 # via celery
-bleach==3.1.0 # via nbconvert
-git+https://github.com/russelljjarvis/BluePyOpt@scidash#egg=bluepyopt-9999
-bokeh==1.4.0 # via dask
-bs4==0.0.1
-cachetools==0.8.0 # via airspeed
celery[redis]==4.2.2
-cerberus==1.3.2
-certifi==2019.11.28 # via requests
channels==2.1.2
-chardet==3.0.4 # via requests
-click==7.0 # via distributed
-cloudpickle==1.2.2 # via dask, distributed
constantly==15.1.0 # via twisted
-cycler==0.10.0 # via matplotlib
-cypy==0.2.0
daphne==2.2.5 # via channels
-dask[complete]==2.9.1 # via distributed
+dask[complete]==2.9.0 # via distributed
deap==1.3.0
decorator==4.4.1 # via ipyparallel, ipython, networkx, traitlets, validators
defusedxml==0.6.0 # via nbconvert
-distributed==2.9.1 # via dask
+distributed==2.9.0 # via dask
django-celery-beat==1.4.0
django-celery-results==1.0.4
django-db-logger==0.1.7
@@ -53,26 +32,17 @@ django==1.11.23
djangorestframework-jwt==1.11.0
djangorestframework==3.7.1
drf-writable-nested==0.5.1
-efel==3.0.70
-elephant==0.4.1
-entrypoints==0.3 # via nbconvert
-execnet==1.7.1
-fsspec==0.6.2 # via dask
-future==0.18.2 # via allensdk
-gitdb2==2.0.6 # via gitpython
-gitpython==3.0.5
-greenlet==0.4.15 # via scoop
-h5py==2.10.0 # via allensdk
-heapdict==1.0.1 # via zict
+git+git://github.com/ddelpiano/neuronunit@4.0.0#egg=neuronunit
hyperlink==18.0.0 # via twisted
idna==2.8 # via hyperlink, requests
igor==0.3
imageio==2.6.1 # via scikit-image
+importlib-metadata==1.3.0 # via jsonschema
incremental==17.5.0 # via twisted
ipykernel==5.1.3 # via ipyparallel
ipyparallel==6.2.4
ipython-genutils==0.2.0 # via ipyparallel, nbformat, traitlets
-ipython==7.11.1 # via ipykernel, ipyparallel
+ipython==7.10.2 # via ipykernel, ipyparallel
jedi==0.15.2 # via ipython
jinja2==2.10.3 # via allensdk, bokeh, nbconvert
jsonschema==3.2.0 # via nbformat
@@ -89,6 +59,7 @@ lxml==4.4.2 # via libneuroml
markupsafe==1.1.1 # via jinja2
matplotlib==3.1.2 # via allensdk, scikit-image
mistune==0.8.4 # via nbconvert
+more-itertools==8.0.2 # via zipp
msgpack==0.6.2 # via distributed
nbconvert==5.6.1
nbformat==4.4.0 # via nbconvert
@@ -106,54 +77,33 @@ partd==1.1.0 # via dask
patsy==0.5.1 # via statsmodels
pexpect==4.7.0 # via ipython
pickleshare==0.7.5 # via ipython
-pillow==7.0.0 # via bokeh, imageio, scikit-image
+pillow==6.2.1 # via bokeh, imageio, scikit-image
prompt-toolkit==3.0.2 # via ipython
psutil==5.6.7 # via distributed
psycopg2==2.7.7
-ptyprocess==0.6.0 # via pexpect
-pygments==2.5.2 # via ipython, nbconvert
pyhamcrest==1.9.0 # via twisted
pyjwt==1.7.1 # via djangorestframework-jwt
pylems==0.4.9.3
git+https://github.com/rgerkin/pyneuroml@master#egg=pyneuroml-9999
pynn==0.9.5
-pynrrd==0.4.2 # via allensdk
+pynrrd==0.4.1 # via allensdk
pyparsing==2.4.6 # via matplotlib, packaging
pyrsistent==0.15.6 # via jsonschema
python-crontab==2.3.6 # via django-celery-beat
-python-dateutil==2.8.0 # via bokeh, ipyparallel, jupyter-client, matplotlib, pandas, python-crontab
-pytz==2018.9 # via celery, django, django-timezone-field, pandas
-pywavelets==1.1.1 # via scikit-image
-pyyaml==5.2 # via bokeh, dask, distributed
-pyzmq==18.1.1 # via ipyparallel, jupyter-client, scoop
-quantities==0.12.1 # via elephant, neo, pynn
+python-dateutil==2.8.0 # via python-crontab
+pytz==2018.9 # via celery, django, django-timezone-field
redis==2.10.6 # via celery
-requests-toolbelt==0.9.1 # via allensdk
-requests==2.22.0 # via allensdk, requests-toolbelt
rest-framework-cache==0.1
-scikit-image==0.16.2 # via allensdk
-scipy==1.4.1 # via allensdk, elephant, lmfit, scikit-image, statsmodels
-git+https://github.com/scidash/sciunit@dev#egg=sciunit-9999
-scoop==0.7.1.1
-simpleitk==1.2.4 # via allensdk
-simplejson==3.17.0 # via allensdk
-six==1.12.0 # via airspeed, allensdk, autobahn, automat, bleach, bokeh, cycler, django-extensions, efel, elephant, h5py, jsonschema, libneuroml, packaging, patsy, pyhamcrest, pyrsistent, python-dateutil, traitlets, txaio, validators
-smmap2==2.0.5 # via gitdb2
-sortedcontainers==2.1.0 # via distributed
-soupsieve==1.9.5 # via beautifulsoup4
-statsmodels==0.10.2 # via allensdk
-tblib==1.6.0 # via distributed
-testpath==0.4.4 # via nbconvert
-toolz==0.10.0 # via dask, distributed, partd
-tornado==6.0.3 # via bokeh, distributed, ipykernel, ipyparallel, jupyter-client
-traitlets==4.3.3 # via ipykernel, ipyparallel, ipython, jupyter-client, jupyter-core, nbconvert, nbformat
+six==1.12.0 # via autobahn, automat, django-extensions, pyhamcrest, python-dateutil, txaio
twisted==18.9.0 # via daphne
txaio==18.8.1 # via autobahn
-uncertainties==3.1.2 # via lmfit
-urllib3==1.25.7 # via requests
-validators==0.14.1
vine==1.3.0 # via amqp
-wcwidth==0.1.8 # via prompt-toolkit
+wcwidth==0.1.7 # via prompt-toolkit
webencodings==0.5.1 # via bleach
+wheel==0.33.6
zict==1.0.0 # via distributed
+zipp==0.6.0 # via importlib-metadata
zope.interface==4.6.0 # via twisted
+
+# The following packages are considered to be unsafe in a requirements file:
+# setuptools
diff --git a/scidash/general/serializers.py b/scidash/general/serializers.py
index 6a68ddff..16f97446 100644
--- a/scidash/general/serializers.py
+++ b/scidash/general/serializers.py
@@ -1,4 +1,4 @@
-from rest_framework import serializers
+from rest_framework import serializers, fields
from scidash.general.models import Tag
@@ -7,3 +7,48 @@ class TagSerializer(serializers.ModelSerializer):
class Meta:
model = Tag
fields = ('name', )
+
+
+class SerializerWritableMethodField(fields.ModelField):
+ """
+ A writable (ModelField base) SerializerMethodField that get its
+ representation from calling a method on the parent serializer class. The
+ method called will be of the form "get_{field_name}", and should take a
+ single argument, which is the object being serialized.
+
+ For example:
+
+ class ExampleSerializer(self):
+        class_name = SerializerWritableMethodField(
+ model_field=TestClass()._meta.get_field('class_name'))
+
+ def get_class_name(self, obj):
+ return ... # Calculate some data to return.
+ """
+
+ def __init__(self, method_name=None, **kwargs):
+ self.method_name = method_name
+ super(SerializerWritableMethodField, self).__init__(**kwargs)
+
+ def bind(self, field_name, parent):
+ # In order to enforce a consistent style, we error if a redundant
+ # 'method_name' argument has been used. For example:
+ # my_fld = serializer.SerializerMethodField(method_name='get_my_fld')
+ default_method_name = 'get_{field_name}'.format(field_name=field_name)
+        assert self.method_name != default_method_name, (
+            "It is redundant to specify `%s` on "
+ "serializer '%s', because it is the same as the default method "
+ "name. Remove the `method_name` argument." %
+ (self.method_name, field_name, parent.__class__.__name__)
+ )
+
+ # The method name should default to `get_{field_name}`.
+ if self.method_name is None:
+ self.method_name = default_method_name
+
+ super(SerializerWritableMethodField, self).bind(
+ field_name, parent)
+
+ def to_representation(self, value):
+ method = getattr(self.parent, self.method_name)
+ return method(value)
diff --git a/scidash/general/tests.py b/scidash/general/tests/__init__.py
similarity index 100%
rename from scidash/general/tests.py
rename to scidash/general/tests/__init__.py
diff --git a/scidash/general/tests/test_geppetto_servlet.py b/scidash/general/tests/test_geppetto_servlet.py
new file mode 100644
index 00000000..82b8df46
--- /dev/null
+++ b/scidash/general/tests/test_geppetto_servlet.py
@@ -0,0 +1,15 @@
+from django.test import TestCase
+
+from pygeppetto_gateway.base import GeppettoServletManager
+
+
+class GeppettoServletTest(TestCase):
+ @classmethod
+ def setUpClass(cls):
+ super(GeppettoServletTest, cls).setUpClass()
+ cls.servlet_manager = GeppettoServletManager()
+
+ def test_ws_address(self):
+ self.assertEqual(
+ self.servlet_manager.host,
+ "ws://scidash-virgo:8080/org.geppetto.frontend/GeppettoServlet")
diff --git a/scidash/sciunitmodels/tests/test_data/score_object.json b/scidash/sciunitmodels/tests/test_data/score_object.json
index 70ad446b..61ded11f 100644
--- a/scidash/sciunitmodels/tests/test_data/score_object.json
+++ b/scidash/sciunitmodels/tests/test_data/score_object.json
@@ -1,11 +1,29 @@
{
- "score_class": {"class_name": "ZScore", "url": "http://test-url.for/not-spaming-data/in-database"},
+ "score_class": {
+ "class_name": "ZScore",
+ "url": "http://test-url.for/not-spaming-data/in-database"
+ },
"hash_id": "111",
"model_instance": {
"model_class": {
"class_name": "ReducedModel",
"url": "http://test-url.for/not-spaming-data/in-database",
+ "import_path": "neuronunit.models.static.StaticModel",
+ "memo": null,
+ "extra_capabilities": [],
"capabilities": [
+ {
+ "class_name": "ReceivesSquareCurrent"
+ },
+ {
+ "class_name": "ProducesActionPotentials"
+ },
+ {
+ "class_name": "ProducesSpikes"
+ },
+ {
+ "class_name": "ProducesMembranePotential"
+ },
{
"class_name": "CanBeReduced"
}
@@ -25,16 +43,18 @@
"test_instance": {
"description": null,
"hash_id": "111",
- "test_suites": [{
- "hash": "testhash",
- "name": "ReducedSuite"
- }],
+ "test_suites": [
+ {
+ "hash": "testhash",
+ "name": "ReducedSuite"
+ }
+ ],
"test_class": {
"class_name": "MyTest",
"url": "http://test-url.for/not-spaming-data/in-database"
},
"observation": {
- "mean":"8",
+ "mean": "8",
"std": "3",
"url": ""
},
diff --git a/scidash/sciunitmodels/tests/test_sciunit_models.py b/scidash/sciunitmodels/tests/test_sciunit_models.py
index d0b825ac..4bc1806c 100644
--- a/scidash/sciunitmodels/tests/test_sciunit_models.py
+++ b/scidash/sciunitmodels/tests/test_sciunit_models.py
@@ -64,7 +64,7 @@ def test_if_capabilities_endpoint_works_correctly(self):
parsed_response = parsed_response.pop()
self.scrub(parsed_response, 'id')
capabilities_data = data.get('model_instance') \
- .get('model_class').get('capabilities').pop()
+ .get('model_class').get('capabilities').pop()
for key in capabilities_data.keys():
self.assertTrue(key in parsed_response)
@@ -86,7 +86,7 @@ def test_if_model_class_endpoint_works_correctly(self):
parsed_response = parsed_response.pop()
self.scrub(parsed_response, 'id')
model_class_data = data.get('model_instance') \
- .get('model_class')
+ .get('model_class')
for key in model_class_data.keys():
self.assertTrue(key in parsed_response)
@@ -118,18 +118,16 @@ def test_if_model_instance_endpoint_works_correctly(self):
class SciunitModelMatchingClassObjects(TestCase):
-    @classmethod
- def setUpClass(cls):
- super(SciunitModelMatchingClassObjects, cls).setUpClass()
-
- cls.model_class = {
+ def setUp(self):
+ self.model_class = {
"class_name": "ScoreModelClass",
"capabilities": [{
"class_name": "TestCapability"
}],
+ "import_path": "neuronunit.models.static.StaticModel",
"url": "http://test-score.url"
}
- cls.user = ScidashUser.objects.create_user(
+ self.user = ScidashUser.objects.create_user(
'admin', 'a@a.cc', 'montecarlo'
)
@@ -139,12 +138,6 @@ def test_is_model_class_match_the_same_object(self):
model_class_serializer = ModelClassSerializer(data=self.model_class)
- if model_class_serializer.is_valid():
- model_class_serializer.save()
-
- model_class_serializer = None
- model_class_serializer = ModelClassSerializer(data=self.model_class)
-
if model_class_serializer.is_valid():
model_class_serializer.save()
diff --git a/scidash/sciunittests/serializers.py b/scidash/sciunittests/serializers.py
index 97f369dc..ef55915b 100644
--- a/scidash/sciunittests/serializers.py
+++ b/scidash/sciunittests/serializers.py
@@ -2,13 +2,14 @@
import numpy as np
from drf_writable_nested import WritableNestedModelSerializer
-from rest_framework import serializers
+from rest_framework import serializers, fields
import sciunit
from scidash.account.serializers import ScidashUserSerializer
from scidash.general.helpers import import_class
from scidash.general.mixins import GetByKeyOrCreateMixin, GetOrCreateMixin
-from scidash.general.serializers import TagSerializer
+from scidash.general.serializers import TagSerializer, \
+ SerializerWritableMethodField
from scidash.sciunitmodels.serializers import ModelInstanceSerializer
from scidash.sciunittests.helpers import build_destructured_unit
from scidash.sciunittests.models import (
@@ -27,18 +28,18 @@ class Meta:
class TestClassSerializer(
- GetByKeyOrCreateMixin, WritableNestedModelSerializer
-):
- class_name = serializers.SerializerMethodField()
+ GetByKeyOrCreateMixin, WritableNestedModelSerializer):
+ class_name = SerializerWritableMethodField(
+ model_field=TestClass()._meta.get_field('class_name'))
units_name = serializers.CharField(required=False)
key = 'import_path'
def get_class_name(self, obj):
# return class_name + ( first part of import_path )
- return obj.class_name + \
- (' (' +
- '.'.join(obj.import_path.split('.')[0:-1])
- + ')').replace(' ()', '')
+ return obj.class_name + (
+ ' (' +
+ '.'.join((obj.import_path if obj.import_path else ''
+ ).split('.')[0:-1]) + ')').replace(' ()', '')
class Meta:
model = TestClass
diff --git a/scidash/sciunittests/tests/test_data/score_object.json b/scidash/sciunittests/tests/test_data/score_object.json
index 70ad446b..f40e9ba6 100644
--- a/scidash/sciunittests/tests/test_data/score_object.json
+++ b/scidash/sciunittests/tests/test_data/score_object.json
@@ -1,11 +1,34 @@
{
- "score_class": {"class_name": "ZScore", "url": "http://test-url.for/not-spaming-data/in-database"},
+ "score_class": {
+ "class_name": "ZScore",
+ "url": "http://test-url.for/not-spaming-data/in-database"
+ },
"hash_id": "111",
"model_instance": {
"model_class": {
"class_name": "ReducedModel",
"url": "http://test-url.for/not-spaming-data/in-database",
+ "import_path": "neuronunit.models.reduced.ReducedModel",
+ "memo": null,
+ "extra_capabilities": [
+ 24
+ ],
"capabilities": [
+ {
+ "class_name": "Runnable"
+ },
+ {
+ "class_name": "ReceivesSquareCurrent"
+ },
+ {
+ "class_name": "ProducesActionPotentials"
+ },
+ {
+ "class_name": "ProducesSpikes"
+ },
+ {
+ "class_name": "ProducesMembranePotential"
+ },
{
"class_name": "CanBeReduced"
}
@@ -14,6 +37,8 @@
"attributes": {},
"hash_id": "111",
"name": "Izhikevich",
+ "status": "l",
+ "tags": [],
"run_params": {},
"url": "https://github.com/scidash/neuronunit/blob/master/neuronunit/models/NeuroML2/LEMS_2007One.xml",
"backend": "JNeuroML"
@@ -23,21 +48,34 @@
"sort_key": 0.738882680363527,
"score_type": "ZType",
"test_instance": {
+ "name": "Default Name",
"description": null,
"hash_id": "111",
- "test_suites": [{
- "hash": "testhash",
- "name": "ReducedSuite"
- }],
+ "status": "l",
+ "tags": [],
+ "params": null,
+ "verbose": 1,
+ "test_suites": [
+ {
+ "hash": "testhash",
+ "name": "ReducedSuite"
+ }
+ ],
"test_class": {
"class_name": "MyTest",
- "url": "http://test-url.for/not-spaming-data/in-database"
+ "url": "http://test-url.for/not-spaming-data/in-database",
+ "import_path": null,
+ "observation_schema": null,
+ "test_parameters_schema": null,
+ "units": null,
+ "memo": null,
+ "params_units": null,
+ "default_params": null
},
"observation": {
- "mean":"8",
+ "mean": "8",
"std": "3",
"url": ""
- },
- "verbose": 1
+ }
}
}
diff --git a/scidash/sciunittests/tests/test_data/score_objects_list.json b/scidash/sciunittests/tests/test_data/score_objects_list.json
index 9ea540a3..f102176d 100644
--- a/scidash/sciunittests/tests/test_data/score_objects_list.json
+++ b/scidash/sciunittests/tests/test_data/score_objects_list.json
@@ -1,14 +1,37 @@
[
{
- "score_class": {"class_name": "ZScore", "url": "http://test-url.for/not-spaming-data/in-database"},
+ "score_class": {
+ "class_name": "ZScore",
+ "url": "http://test-url.for/not-spaming-data/in-database"
+ },
"hash_id": "111",
"model_instance": {
"model_class": {
"class_name": "ReducedModel_1",
"url": "http://test-url.for/not-spaming-data/in-database-one",
+ "import_path": "neuronunit.models.reduced.ReducedModel",
+ "memo": null,
+ "extra_capabilities": [
+ 14
+ ],
"capabilities": [
{
- "class_name": "CanBeReduced_1"
+ "class_name": "Runnable"
+ },
+ {
+ "class_name": "ReceivesSquareCurrent"
+ },
+ {
+ "class_name": "ProducesActionPotentials"
+ },
+ {
+ "class_name": "ProducesSpikes"
+ },
+ {
+ "class_name": "ProducesMembranePotential"
+ },
+ {
+ "class_name": "CanBeReduced"
}
]
},
@@ -16,6 +39,8 @@
"attributes": {},
"name": "Izhikevich",
"run_params": {},
+ "status": "l",
+ "tags": [],
"url": "https://github.com/scidash/neuronunit/blob/master/neuronunit/models/NeuroML2/LEMS_2007One.xml",
"backend": "JNeuroML"
},
@@ -25,16 +50,18 @@
"score_type": "ZType_1",
"test_instance": {
"description": null,
- "test_suites": [{
- "hash": "testhash",
- "name": "ReducedSuite_1"
- }],
+ "test_suites": [
+ {
+ "hash": "testhash",
+ "name": "ReducedSuite_1"
+ }
+ ],
"test_class": {
"class_name": "MyTest_1",
"url": "http://test-url.for/not-spaming-data/in-database"
},
"observation": {
- "mean":"8",
+ "mean": "8",
"std": "3",
"url": ""
},
@@ -43,15 +70,33 @@
}
},
{
- "score_class": {"class_name": "ZScore", "url": "http://test-url.for/not-spaming-data/in-database"},
+ "score_class": {
+ "class_name": "ZScore",
+ "url": "http://test-url.for/not-spaming-data/in-database"
+ },
"hash_id": "222",
"model_instance": {
"model_class": {
"class_name": "ReducedModel_2",
"url": "http://test-url.for/not-spaming-data/in-database-two",
+ "import_path": "neuronunit.models.static.StaticModel",
+ "memo": null,
+ "extra_capabilities": [],
"capabilities": [
{
- "class_name": "CanBeReduced_2"
+ "class_name": "ReceivesSquareCurrent"
+ },
+ {
+ "class_name": "ProducesActionPotentials"
+ },
+ {
+ "class_name": "ProducesSpikes"
+ },
+ {
+ "class_name": "ProducesMembranePotential"
+ },
+ {
+ "class_name": "CanBeReduced"
}
]
},
@@ -59,6 +104,8 @@
"attributes": {},
"name": "Izhikevich",
"run_params": {},
+ "status": "l",
+ "tags": [],
"url": "https://github.com/scidash/neuronunit/blob/master/neuronunit/models/NeuroML2/LEMS_2007One.xml",
"backend": "JNeuroML"
},
@@ -69,16 +116,18 @@
"test_instance": {
"description": null,
"hash_id": "111",
- "test_suites": [{
- "hash": "testhash",
- "name": "ReducedSuite_2"
- }],
+ "test_suites": [
+ {
+ "hash": "testhash",
+ "name": "ReducedSuite_2"
+ }
+ ],
"test_class": {
"class_name": "MyTest_2",
"url": "http://test-url.for/not-spaming-data/in-database"
},
"observation": {
- "mean":"8",
+ "mean": "8",
"std": "3",
"url": ""
},
@@ -86,13 +135,31 @@
}
},
{
- "score_class": {"class_name": "ZScore", "url": "http://test-url.for/not-spaming-data/in-database"},
+ "score_class": {
+ "class_name": "ZScore",
+ "url": "http://test-url.for/not-spaming-data/in-database"
+ },
"hash_id": "333",
"model_instance": {
"model_class": {
"class_name": "ReducedModel",
"url": "http://test-url.for/not-spaming-data/in-database-three",
+ "import_path": "neuronunit.models.static.StaticModel",
+ "memo": null,
+ "extra_capabilities": [],
"capabilities": [
+ {
+ "class_name": "ReceivesSquareCurrent"
+ },
+ {
+ "class_name": "ProducesActionPotentials"
+ },
+ {
+ "class_name": "ProducesSpikes"
+ },
+ {
+ "class_name": "ProducesMembranePotential"
+ },
{
"class_name": "CanBeReduced"
}
@@ -102,6 +169,8 @@
"hash_id": "311",
"name": "Izhikevich",
"run_params": {},
+ "status": "l",
+ "tags": [],
"url": "https://github.com/scidash/neuronunit/blob/master/neuronunit/models/NeuroML2/LEMS_2007One.xml",
"backend": "JNeuroML"
},
@@ -112,16 +181,18 @@
"test_instance": {
"description": null,
"hash_id": "111",
- "test_suites": [{
- "hash": "testhash",
- "name": "ReducedSuite"
- }],
+ "test_suites": [
+ {
+ "hash": "testhash",
+ "name": "ReducedSuite"
+ }
+ ],
"test_class": {
"class_name": "MyTest",
"url": "http://test-url.for/not-spaming-data/in-database"
},
"observation": {
- "mean":"8",
+ "mean": "8",
"std": "3",
"url": ""
},
diff --git a/scidash/sciunittests/tests/test_sciunit_tests.py b/scidash/sciunittests/tests/test_sciunit_tests.py
index 5a404c13..2c239219 100644
--- a/scidash/sciunittests/tests/test_sciunit_tests.py
+++ b/scidash/sciunittests/tests/test_sciunit_tests.py
@@ -12,12 +12,11 @@
)
SAMPLE_OBJECT = os.path.join(
- os.path.dirname(os.path.dirname(__file__)), 'test_data/score_object.json'
+ os.path.dirname(__file__), 'test_data/score_object.json'
)
SAMPLE_OBJECT_LIST = os.path.join(
- os.path.dirname(os.path.dirname(__file__)),
- 'test_data/score_objects_list.json'
+ os.path.dirname(__file__), 'test_data/score_objects_list.json'
)
@@ -78,8 +77,10 @@ def test_if_scores_endpoint_works_correctly(self):
self.scrub(parsed_response, 'build_info')
self.scrub(parsed_response, 'hostname')
self.scrub(parsed_response, 'owner')
+ self.scrub(parsed_response, 'units_name')
parsed_keys = parsed_response.keys()
+        self.maxDiff = None
for key in data.keys():
self.assertTrue(key in parsed_keys)
self.assertEqual(data.get(key), parsed_response.get(key))
@@ -100,6 +101,7 @@ def test_if_test_instance_endpoint_works_correctly(self):
self.scrub(parsed_response, 'id')
self.scrub(parsed_response, 'timestamp')
self.scrub(parsed_response, 'owner')
+ self.scrub(parsed_response, 'units_name')
parsed_keys = parsed_response.keys()
test_instance_data = data.get('test_instance')
@@ -234,7 +236,7 @@ def test_scores_endpoint_filters_get_by_class_name(self):
parsed_response = response.json()
first_element = parsed_response[2]
model_class_name = first_element.get('model_instance') \
- .get('model_class').get('class_name')
+ .get('model_class').get('class_name')
filtered_url = '{}?model={}'.format(
reverse('score-list'), model_class_name
diff --git a/service/docker/Dockerfile-scidash b/service/docker/Dockerfile-scidash
index 8dcc76a7..412553a4 100644
--- a/service/docker/Dockerfile-scidash
+++ b/service/docker/Dockerfile-scidash
@@ -36,6 +36,9 @@ USER developer
# COPYING PROJECT
WORKDIR $APP_DIR
+# set git email and name for use with git merge
+RUN git config --global user.email "scidash@metacell.us"
+RUN git config --global user.name "SciDash"
RUN $APP_DIR/copy.sh https://github.com/Metacell/scidash.git "${targetBranch}" "${originBranch}" "${defaultBranch}"
diff --git a/service/docker/Dockerfile-virgo b/service/docker/Dockerfile-virgo
index 0ef62bff..abf7aff4 100644
--- a/service/docker/Dockerfile-virgo
+++ b/service/docker/Dockerfile-virgo
@@ -100,48 +100,48 @@ RUN mvn --version
# -== INSTALL GEPPETTO ==-
WORKDIR $SOURCES_DIR
-RUN $APP_DIR/copy.sh http://github.com/openworm/org.geppetto.git "${targetBranch}" "${originBranch}" "${defaultBranch}"
+RUN $APP_DIR/copy.sh http://github.com/openworm/org.geppetto.git geppetto-scidash "${originBranch}" "${defaultBranch}"
RUN cp /git/scidash/service/geppetto/config.json $SOURCES_DIR/org.geppetto/utilities/source_setup
RUN cp /git/scidash/service/geppetto/setup.py $SOURCES_DIR/org.geppetto/utilities/source_setup
RUN cp /git/scidash/service/geppetto/geppetto.plan $SOURCES_DIR/org.geppetto/
-RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.model.git "${targetBranch}" "${originBranch}" "${defaultBranch}" &&\
+RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.model.git geppetto-scidash "${originBranch}" "${defaultBranch}" &&\
cd org.geppetto.model &&\
/bin/echo -e "\e[96mMaven install org.geppetto.model\e[0m" &&\
mvn -Dhttps.protocols=TLSv1.2 -DskipTests --quiet install &&\
rm -rf src && cd ../
-RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.core.git "${targetBranch}" "${originBranch}" "${defaultBranch}" &&\
+RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.core.git geppetto-scidash "${originBranch}" "${defaultBranch}" &&\
cd org.geppetto.core && cp /git/scidash/service/geppetto/core/app-config.xml ./src/main/java/META-INF/spring &&\
/bin/echo -e "\e[96mMaven install org.geppetto.core\e[0m" &&\
mvn -Dhttps.protocols=TLSv1.2 -DskipTests --quiet install &&\
rm -rf src && cd ../
-RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.model.neuroml.git "${targetBranch}" "${originBranch}" "${defaultBranch}" &&\
+RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.model.neuroml.git geppetto-scidash "${originBranch}" "${defaultBranch}" &&\
cd org.geppetto.model.neuroml &&\
/bin/echo -e "\e[96mMaven install org.geppetto.model.neuroml\e[0m" &&\
mvn -Dhttps.protocols=TLSv1.2 -DskipTests --quiet install &&\
rm -rf src && cd ../
-RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.simulation.git "${targetBranch}" "${originBranch}" "${defaultBranch}" &&\
+RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.simulation.git geppetto-scidash "${originBranch}" "${defaultBranch}" &&\
cd org.geppetto.simulation &&\
/bin/echo -e "\e[96mMaven install org.geppetto.simulation\e[0m" &&\
mvn -Dhttps.protocols=TLSv1.2 -DskipTests --quiet install &&\
rm -rf src && cd ../
-RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.simulator.external.git "${targetBranch}" "${originBranch}" "${defaultBranch}" &&\
+RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.simulator.external.git geppetto-scidash "${originBranch}" "${defaultBranch}" &&\
cd org.geppetto.simulator.external &&\
/bin/echo -e "\e[96mMaven install org.geppetto.simulator.external\e[0m" &&\
mvn -Dhttps.protocols=TLSv1.2 -DskipTests --quiet install &&\
rm -rf src && cd ../
-RUN $APP_DIR/copy.sh https://github.com/Metacell/org.geppetto.simulator.scidash.git "${targetBranch}" "${originBranch}" "${defaultBranch}" &&\
+RUN $APP_DIR/copy.sh https://github.com/Metacell/org.geppetto.simulator.scidash.git geppetto-scidash "${originBranch}" "${defaultBranch}" &&\
cd org.geppetto.simulator.scidash &&\
/bin/echo -e "\e[96mMaven install org.geppetto.simulator.scidash\e[0m" &&\
mvn -Dhttps.protocols=TLSv1.2 -DskipTests --quiet install &&\
rm -rf src && cd ../
-RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.frontend.git "${targetBranch}" "${originBranch}" "${defaultBranch}" &&\
+RUN $APP_DIR/copy.sh https://github.com/openworm/org.geppetto.frontend.git geppetto-scidash "${originBranch}" "${defaultBranch}" &&\
cd org.geppetto.frontend && cp /git/scidash/service/geppetto/frontend/app-config.xml ./src/main/webapp/WEB-INF/spring/ &&\
/bin/echo -e "\e[96mMaven install org.geppetto.frontend\e[0m" &&\
mvn -Dhttps.protocols=TLSv1.2 -DskipTests --quiet install &&\
diff --git a/service/docker/build_all.sh b/service/docker/build_all.sh
index 7f0e00f7..da7cf182 100755
--- a/service/docker/build_all.sh
+++ b/service/docker/build_all.sh
@@ -1,5 +1,7 @@
#!/bin/bash
./build_database.sh
+./build_virgo_base.sh
./build_virgo.sh
./build_scidash.sh
+
diff --git a/service/hooks/pre-commit b/service/hooks/pre-commit
index 963e6a18..f8603d23 100644
--- a/service/hooks/pre-commit
+++ b/service/hooks/pre-commit
@@ -3,3 +3,5 @@
set -e
make lint
+
+make run-tests
diff --git a/service/scripts/db-create-psql.sh b/service/scripts/db-create-psql.sh
index 92c4628b..c26decd4 100755
--- a/service/scripts/db-create-psql.sh
+++ b/service/scripts/db-create-psql.sh
@@ -21,4 +21,4 @@ cd /tmp
git clone https://github.com/ddelpiano/scidash-artifacts
cd scidash-artifacts/database
gunzip `ls | head -n 1`
-pg_restore --clean -d scidash `ls *db | head -n 1` || true
+pg_restore --clean --if-exists -d scidash `ls *db | head -n 1` || true