From 74f76b7aeb713b89c5d35aada932791a884e348f Mon Sep 17 00:00:00 2001
From: Stefan Negru
Date: Fri, 31 Dec 2021 11:01:47 +0200
Subject: [PATCH 1/4] switch from misspell to pyspelling

---
 .github/workflows/docs.yml |   7 +-
 .spellcheck.yml            |  27 +++++++
 .wordlist.txt              | 146 +++++++++++++++++++++++++++++++++++++
 dictionary.dic             | Bin 0 -> 4960 bytes
 4 files changed, 176 insertions(+), 4 deletions(-)
 create mode 100644 .spellcheck.yml
 create mode 100644 .wordlist.txt
 create mode 100644 dictionary.dic

diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index f7999a43..14704d96 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -12,10 +12,9 @@ jobs:
     runs-on: ${{ matrix.os }}
 
     steps:
-      - name: Spell check install
-        run: curl -L https://git.io/misspell | bash
-      - name: Spell check docs
-        run: bin/misspell -error docs/*
+      - uses: actions/checkout@master
+      - uses: rojopolis/spellcheck-github-actions@0.20.0
+        name: Spellcheck
 
   code_docs:
     strategy:
diff --git a/.spellcheck.yml b/.spellcheck.yml
new file mode 100644
index 00000000..142e44eb
--- /dev/null
+++ b/.spellcheck.yml
@@ -0,0 +1,27 @@
+matrix:
+- name: Markdown
+  aspell:
+    lang: en
+  dictionary:
+    wordlists:
+    - .wordlist.txt
+    encoding: utf-8
+  pipeline:
+  - pyspelling.filters.markdown:
+  - pyspelling.filters.context:
+      context_visible_first: true
+      escapes: '\\[\\`~]'
+      delimiters:
+      # Ignore text between inline backticks, as this is code or highlighted words
+      - open: '(?P<open>`+)'
+        close: '(?P=open)'
+      # Ignore text surrounded by <>, as in RST it is a link
+      - open: '<([A-Za-z0-9-_:.]+)|(https?://[^\\s/$.?#].[^\\s]+|[A-Za-z0-9-_:.]+)'
+        close: '>'
+      # Ignore code in RST starting with $
+      - open: '\$.+'
+        close: ''
+  sources:
+  - 'docs/*.rst'
+  - '**/*.md'
+  default_encoding: utf-8
diff --git a/.wordlist.txt b/.wordlist.txt
new file mode 100644
index 00000000..1cb362e4
--- /dev/null
+++ b/.wordlist.txt
@@ -0,0 +1,146 @@
+AAI
+accessType
+aiohttp
+alleleCount
+alleleRequest
+alternateBases
+alternativeUrl
+api
+APIBehavior
+APITest
+apiVersion
+assemblyId
+automodule
+autosummary
+beacondb
+beaconId
+beaconpy
+BND
+BONA
+btree
+callCount
+cd
+chr
+CHR
+claimName
+conf
+config
+contactUrl
+containerPort
+createdAt
+createDateTime
+csc
+CSC
+cscfi
+CSCfi
+datafile
+datafiles
+dataloader
+dataset
+DATASET
+datasetAlleleResponses
+datasetHandover
+datasetId
+datasetIds
+datasets
+dedent
+documentationUrl
+ebi
+ega
+EGA
+endMax
+endMin
+env
+ENV
+Espoo
+examplebrowser
+exampleid
+externalUrl
+fi
+FIDE
+finland
+ga
+genomic
+genomoe
+gh
+GH
+GHBeacon
+github
+GRCh
+Gunicorn
+GunicornUVLoopWebWorker
+handoverType
+hg
+hostPath
+html
+http
+HttpLocust
+https
+ICT
+ietf
+imagePullPolicy
+includeDatasetResponses
+ini
+init
+io
+javascript
+jpg
+json
+JSON
+JWK
+jwt
+JWT
+Keilaranta
+literalinclude
+localhost
+logoUrl
+matchLabels
+mateID
+mateName
+mateStart
+mountPath
+namespace
+NodePort
+OAuth
+orgInfo
+persistentVolumeClaim
+pgtune
+postgres
+POSTGRES
+py
+readthedocs
+referenceBases
+referenceID
+referenceName
+restartPolicy
+rfc
+RGB
+sampleAlleleRequests
+sampleCount
+schemas
+secretKeyRef
+SNP
+sql
+startMax
+startMin
+targetPort
+TaskSet
+TCP
+toctree
+txt
+ua
+uk
+updatedAt
+updateDateTime
+uri
+url
+utils
+valited
+valueFrom
+variantCount
+varianttype
+variantType
+vcf
+volumeMounts
+welcomeUrl
+www
\ No newline at end of file
diff --git a/dictionary.dic b/dictionary.dic
new file mode 100644
index 0000000000000000000000000000000000000000..63cff1c64501f01b028f75969589eeede5bf2cc8
GIT binary patch
literal 4960
zcmai1Ym6jS6+YWl)z#h8Q#~`?Jv}q*F0&{uG)Q=ffa1>d?DWjg)xAwu?aqJ(t?9lq zUAxs?O;z>mY+_;zBnXl$7$1=sH3UO6h!7+E5PtYTV)TawNK}-lF$PEwN$?R##P8g$ z*=&5o%G7*)zkBYv=bn4+Ik%i}AUtorBetBO7tPC-2<8L-v^Rg%6<1wJ|L18qPydI+ z<6s`28qq)7NAz2j=>8nh1E8LZr9qQ|SI3KPGVty_^P*3KuHP3C!Gt|_gypzdULax} zXb}|{ZK7#l1>-MZg#3O+@CSiaycaTp zpJxPr2)GLQix|PbixK+X&4~Vsf#{~gj0;4UFhbF#jOgGs)L`djj2M3fBka4H5%yif z2z~EkgueF!O^myq5$zj*b&R`_5$z8!ZV-J4*uc7vFoItILM<&aVmw?$P3T<$wqVaP zBls0Y@T-g%w+6Jp-^>Vo_#+GR;g2lz9|PLxXWcf&fg&6Kjsw@h-^z&f-NuOZ+z#9T zeiZoc5Bp*sFYhO>llPPD;{CiuVuMafwCLj!ZTcM}^#4hsN&k>&(|h-Uuh(dc5ysvn zu}b$#tkJ^~ZTdMQj8_wk6`GJ(rNa_!x``3?-pL3DoR!$5M;PrYJt@(oA4;s#3lf|3 znna7<23D}KQwVU5r`sjkbc)e#(JqkV>Tbq{Ne?i>e-8sWuAX4T20qOQhrh^Z*XdP> zP5L_!Vsr%#PHmw=*E3cvT4hAX#~2YXp2RwRQeulf&)BHaqY`WM6Nz=wHRf$9Ga^tv z%!okfO5C6?G9n-zk=URY7-8>gKofziBsnfyR0h_eXNA#j&_^U%aF2)U$J_~Hr zEqavEZqoOF97``SLh;LtmPKz!w5fz7(W=(ydWkJsW~^1{bByrMLyYzUJp;60&x;c4 z^m~addW*5%BqPOf+@v}q0`@kbU8~cb5}WjpM2o&95&mbyy8pn4egB(8lP*kiyFu4V zv}hT)jsyM*qgkcDNUTvN!|giFOSI`m##Vz&AfJc(fdA_$5|xsPl~pp5iR~^KW6J5% zr)hd$nRH!QTwG(`^}#2G4vi>3*=i*5C5!V7gp`m&^)MfwA8ZyH0Q5iA_|3Pz2C1>b1$3PRD(2 z2R4GXu!rhQ#%Z@j7}{qBf@WCSZkQ&i#WArPiJy59dkTScw)&P_9L3p!MC7{)cxb5j+VG%D8h&iq{^nx0mh-T3JVUL>vMMm8-aoKTGMQr7&w!2d(O6K4n42wdv5y-Th)M*6u0kEUMa&# z?7zkdGG-Og-=WKMm9)~>Vsot`y3UU42UG-I z-N2yf45UtoAawmc<-x)z(h53+ZFbRg5)>Y+Gd&!1oJe#QBif%Du}CRu$r$mlT-a_; z@boPhj7@QQ)2({NycOdM87B-yZ__)o*5OrIP117OGS=zSxWZY{$f>S~wkW5TZFtt0 z)P|Akg>0-{x3SSsF^v44-x+#>CNKue%!rY?@LSUBwUoZt*{C}&bF zpITY12&c`-4%vE|Z~h6K$!IH7O3eb4E4par>FA6KhuN0%v+K*Bb1iS9Nc z7&*Htk3zN!L4QXC@-XBzB=P0~srgjb52L1oGis$pQO={at%;UFZ6AX|{s3YnU4JtuJb z5%P$_2?V>2P1&Le6)l)EndGLsjXkL~c<1I)E5p9q_JclMGR9QZY_3-@zjmm!*K_=Y z>HC9qvAOOCr;uzWp}G!t=-Z-?fDy=6AfUZ4V%#Pl;= zO9h3?A=1Kbqt}e(f%|7Y>ZK7i?sAq%Z_kC+doh-)7_ZQi3oE@9+@G zRt;Alqo{}NuIM=-&FVO*SRIP-Ts9O4yr?G5w8R$opdQOJdSEP+<#=>TFCOELQC)-~ z2MeRlMOioF@+jv;p42B4zVj55Fk6Ih3D+DO`yf_6F{y>f8ht zfL*}xicn!V8ncRxPZgw1d#NcKSW8Zcc2R3IoJT%+#SEOXE#MYJ)1(R(sWq=hep@Ej zDGhfjgtIoK+fI1O!dG5i#r29SL<%L!*>X`I3K~;TUP$k82sy}cz!x*jdA`#@vX}tL z8M=_{hz?Ap13GH+0&+1HkYhnq~ z$31(V-7D*CorO=I*kVk;b&ff#e_C1$4rORMDM@TnLE0=?-Wt9y!f`w8Ei4t6#ufsy znBH-`p{NFa58Bqv8Y%?tRi}m5_IvW?GOOc0iQVDDnB9>lq|OICgrC=OC*in)4rGH4 zA{BQ{TSm`JcE<@^WbHV7H126qAvYHDvQ8O=y=KIBxakD$sGQ?gv3M6ZSrZ{+b;)R2joFt!1vZHzKK+DEb~v?kDo#N8hn4eiazGGE{9C-JlpO=-`8=6=f1ajUiAGa zmY{TaN!#CtVtxFPn-o~|#)>pKD+_d@<|j1!Q%H29-9pK)K?K=XlgZ>}mwb1u0^xN@zp9K!l?_)1EG3GG( z*75rgbiIV%S3(DWquvzH`2)!8*|dwXU%}WW^xgt_6>A6}|6R!Dq2mDB9n8n_>+$$W z$bSvLAA_C`#_OzNj0OIuvAs`W-1BJHAoE5nvxWBFyuZd=k3qJAIqqS-u;WR*Uxj=J txF@%V55k5f^gIvQPeJDO^JM;t_Gh5yIp}2hC*t||_n*=C3)sTXzX03?qiX;F literal 0 HcmV?d00001 From 908bb9a78155e9ae33fb6835fe88455d9e64406a Mon Sep 17 00:00:00 2001 From: Stefan Negru Date: Fri, 31 Dec 2021 11:11:15 +0200 Subject: [PATCH 2/4] introduce black formatting checks --- .github/workflows/style.yml | 4 +- beacon_api/__init__.py | 8 +- beacon_api/conf/config.py | 38 ++++--- docs/conf.py | 72 ++++++------ tests/test_basic.py | 218 +++++++++++++++++++----------------- tests/test_data_query.py | 182 +++++++++++++++++++----------- tests/test_db_load.py | 136 +++++++++++----------- tox.ini | 10 +- 8 files changed, 374 insertions(+), 294 deletions(-) diff --git a/.github/workflows/style.yml b/.github/workflows/style.yml index ee9ac8bd..316bd6f8 100644 --- a/.github/workflows/style.yml +++ b/.github/workflows/style.yml @@ -26,9 +26,11 @@ jobs: run: tox -e flake8 - name: Do bandit static check with tox run: tox -e bandit + - name: Black formatting check + run: tox -e black - name: Install libcurl-devel run: | sudo apt-get 
update sudo apt-get install libcurl4-openssl-dev - - name: Do typing check with tox + - name: Type hints check run: tox -e mypy diff --git a/beacon_api/__init__.py b/beacon_api/__init__.py index efd42324..7d95534d 100644 --- a/beacon_api/__init__.py +++ b/beacon_api/__init__.py @@ -24,7 +24,7 @@ __url__ = CONFIG_INFO.url __alturl__ = CONFIG_INFO.alturl __createtime__ = CONFIG_INFO.createtime -__updatetime__ = datetime.datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ') # Every restart of the application means an update to it +__updatetime__ = datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%SZ") # Every restart of the application means an update to it __org_id__ = CONFIG_INFO.org_id __org_name__ = CONFIG_INFO.org_name @@ -33,12 +33,10 @@ __org_welcomeUrl__ = CONFIG_INFO.org_welcomeUrl __org_contactUrl__ = CONFIG_INFO.org_contactUrl __org_logoUrl__ = CONFIG_INFO.org_logoUrl -__org_info__ = {'orgInfo': CONFIG_INFO.org_info} +__org_info__ = {"orgInfo": CONFIG_INFO.org_info} __sample_queries__ = SAMPLE_QUERIES # GA4GH Discovery -__service_type__ = {'group': f'{CONFIG_INFO.service_group}', - 'artifact': f'{CONFIG_INFO.service_artifact}', - 'version': f'{__apiVersion__}'} +__service_type__ = {"group": f"{CONFIG_INFO.service_group}", "artifact": f"{CONFIG_INFO.service_artifact}", "version": f"{__apiVersion__}"} __service_env__ = CONFIG_INFO.environment diff --git a/beacon_api/conf/config.py b/beacon_api/conf/config.py index 2570523a..706f7b5c 100644 --- a/beacon_api/conf/config.py +++ b/beacon_api/conf/config.py @@ -18,7 +18,7 @@ import asyncpg from typing import Awaitable -DB_SCHEMA = os.environ.get('DATABASE_SCHEMA', None) +DB_SCHEMA = os.environ.get("DATABASE_SCHEMA", None) async def init_db_pool() -> Awaitable: @@ -26,20 +26,22 @@ async def init_db_pool() -> Awaitable: As we will have frequent requests to the database it is recommended to create a connection pool. 
""" - return await asyncpg.create_pool(host=os.environ.get('DATABASE_URL', 'localhost'), - port=os.environ.get('DATABASE_PORT', '5432'), - user=os.environ.get('DATABASE_USER', 'beacon'), - password=os.environ.get('DATABASE_PASSWORD', 'beacon'), - database=os.environ.get('DATABASE_NAME', 'beacondb'), - # Multiple schemas can be used, and they need to be comma separated - server_settings={'search_path': DB_SCHEMA if DB_SCHEMA else 'public'}, - # initializing with 0 connections allows the web server to - # start and also continue to live - min_size=0, - # for now limiting the number of connections in the pool - max_size=20, - max_queries=50000, - timeout=120, - command_timeout=180, - max_cached_statement_lifetime=0, - max_inactive_connection_lifetime=180) + return await asyncpg.create_pool( + host=os.environ.get("DATABASE_URL", "localhost"), + port=os.environ.get("DATABASE_PORT", "5432"), + user=os.environ.get("DATABASE_USER", "beacon"), + password=os.environ.get("DATABASE_PASSWORD", "beacon"), + database=os.environ.get("DATABASE_NAME", "beacondb"), + # Multiple schemas can be used, and they need to be comma separated + server_settings={"search_path": DB_SCHEMA if DB_SCHEMA else "public"}, + # initializing with 0 connections allows the web server to + # start and also continue to live + min_size=0, + # for now limiting the number of connections in the pool + max_size=20, + max_queries=50000, + timeout=120, + command_timeout=180, + max_cached_statement_lifetime=0, + max_inactive_connection_lifetime=180, + ) diff --git a/docs/conf.py b/docs/conf.py index 56480723..7fe552d4 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -10,7 +10,7 @@ # Get the project root dir, which is the parent dir of this -sys.path.insert(0, os.path.abspath('../beacon_api')) +sys.path.insert(0, os.path.abspath("../beacon_api")) # -- General configuration ------------------------------------------------ @@ -25,9 +25,19 @@ def __getattr__(cls, name): # List modules need to be mocked -MOCK_MODULES = ['aiohttp', 'asyncpg', 'cyvcf2', 'aiohttp_cors', - 'Cython', 'numpy', 'authlib.jose', 'authlib.jose.errors', - 'uvloop', 'aiocache', 'aiocache.serializers'] +MOCK_MODULES = [ + "aiohttp", + "asyncpg", + "cyvcf2", + "aiohttp_cors", + "Cython", + "numpy", + "authlib.jose", + "authlib.jose.errors", + "uvloop", + "aiocache", + "aiocache.serializers", +] sys.modules.update((mod_name, Mock()) for mod_name in MOCK_MODULES) # If your documentation needs a minimal Sphinx version, state it here. @@ -37,32 +47,34 @@ def __getattr__(cls, name): # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. -extensions = ['sphinx.ext.autodoc', - 'sphinx.ext.autosummary', - 'sphinx.ext.coverage', - 'sphinx.ext.ifconfig', - 'sphinx.ext.viewcode', - 'sphinx.ext.githubpages', - 'sphinx.ext.todo'] +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.autosummary", + "sphinx.ext.coverage", + "sphinx.ext.ifconfig", + "sphinx.ext.viewcode", + "sphinx.ext.githubpages", + "sphinx.ext.todo", +] # Add any paths that contain templates here, relative to this directory. -templates_path = ['templates'] +templates_path = ["templates"] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: # # source_suffix = ['.rst', '.md'] -source_suffix = '.rst' +source_suffix = ".rst" # The master toctree document. 
-master_doc = 'index'
+master_doc = "index"
 
 # Get current year
 current_year = str(datetime.date.today().year)
 
 # General information about the project.
-project = 'beacon-python'
-copyright = f'2018 - {current_year}, {beacon_api.__copyright__} |'
+project = "beacon-python"
+copyright = f"2018 - {current_year}, {beacon_api.__copyright__} |"
 author = beacon_api.__author__
 
 # The version info for the project you're documenting, acts as replacement for
@@ -84,10 +96,10 @@ def __getattr__(cls, name):
 
 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 # These patterns also affect html_static_path and html_extra_path
-exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
+exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]
 
 # The name of the Pygments (syntax highlighting) style to use.
-pygments_style = 'sphinx'
+pygments_style = "sphinx"
 
 # If true, `todo` and `todoList` produce output, else they produce nothing.
 todo_include_todos = False
 
 
 # -- Options for HTML output ----------------------------------------------
 
-html_title = 'beacon-python API'
+html_title = "beacon-python API"
 
 # The theme to use for HTML and HTML Help pages. See the documentation for
 # a list of builtin themes.
-html_theme = 'sphinx_rtd_theme'
-html_theme_options = {
-    'collapse_navigation': True,
-    'sticky_navigation': True,
-    'display_version': True,
-    'prev_next_buttons_location': 'bottom'}
+html_theme = "sphinx_rtd_theme"
+html_theme_options = {"collapse_navigation": True, "sticky_navigation": True, "display_version": True, "prev_next_buttons_location": "bottom"}
 
 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
 # so a file named "default.css" will overwrite the builtin "default.css".
-html_static_path = ['static']
+html_static_path = ["static"]
 
 # Custom sidebar templates, must be a dictionary that maps document names
 # to template names.
@@ -120,7 +128,7 @@ def __getattr__(cls, name): # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - '**': [ + "**": [ # 'about.html', # 'navigation.html', # 'relations.html', # needs 'show_related': True theme option to display @@ -129,15 +137,15 @@ def __getattr__(cls, name): ] } -today_fmt = '%B %d, %Y' +today_fmt = "%B %d, %Y" def setup(app): """Add custom stylesheet.""" - app.add_css_file('custom.css') + app.add_css_file("custom.css") # -- Other stuff ---------------------------------------------------------- -htmlhelp_basename = 'beacon-python' -man_pages = [(master_doc, 'beacon-python', [author], 1)] -texinfo_documents = [(master_doc, 'beacon-python', author, 'Miscellaneous')] +htmlhelp_basename = "beacon-python" +man_pages = [(master_doc, "beacon-python", [author], 1)] +texinfo_documents = [(master_doc, "beacon-python", author, "Miscellaneous")] diff --git a/tests/test_basic.py b/tests/test_basic.py index 91c1232c..d2fd637b 100644 --- a/tests/test_basic.py +++ b/tests/test_basic.py @@ -13,9 +13,7 @@ def mock_token(bona_fide, permissions, auth): """Mock a processed token.""" - return {"bona_fide_status": bona_fide, - "permissions": permissions, - "authenticated": auth} + return {"bona_fide_status": bona_fide, "permissions": permissions, "authenticated": auth} class MockDecodedPassport: @@ -53,7 +51,7 @@ async def close(self): async def check_tables(self, array): """Mimic check_tables.""" - return ['DATASET1', 'DATASET2'] + return ["DATASET1", "DATASET2"] async def create_tables(self, sql_file): """Mimic create_tables.""" @@ -85,21 +83,21 @@ def tearDown(self): def test_parser(self): """Test argument parsing.""" - parsed = parse_arguments(['/path/to/datafile.csv', '/path/to/metadata.json']) - self.assertEqual(parsed.datafile, '/path/to/datafile.csv') - self.assertEqual(parsed.metadata, '/path/to/metadata.json') + parsed = parse_arguments(["/path/to/datafile.csv", "/path/to/metadata.json"]) + self.assertEqual(parsed.datafile, "/path/to/datafile.csv") + self.assertEqual(parsed.metadata, "/path/to/metadata.json") - @asynctest.mock.patch('beacon_api.conf.config.asyncpg') + @asynctest.mock.patch("beacon_api.conf.config.asyncpg") async def test_init_pool(self, db_mock): """Test database connection pool creation.""" - db_mock.return_value = asynctest.CoroutineMock(name='create_pool') + db_mock.return_value = asynctest.CoroutineMock(name="create_pool") db_mock.create_pool = asynctest.CoroutineMock() await init_db_pool() db_mock.create_pool.assert_called() - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.BeaconDB') - @asynctest.mock.patch('beacon_api.utils.db_load.VCF') + @asynctest.mock.patch("beacon_api.utils.db_load.LOG") + @asynctest.mock.patch("beacon_api.utils.db_load.BeaconDB") + @asynctest.mock.patch("beacon_api.utils.db_load.VCF") async def test_init_beacon_db(self, mock_vcf, db_mock, mock_log): """Test beacon_init db call.""" db_mock.return_value = MockBeaconDB() @@ -110,14 +108,13 @@ async def test_init_beacon_db(self, mock_vcf, db_mock, mock_log): "sampleCount": 2504, "externalUrl": "https://datasethost.org/dataset1", "accessType": "PUBLIC"}""" - metafile = self._dir.write('data.json', metadata.encode('utf-8')) + metafile = self._dir.write("data.json", metadata.encode("utf-8")) data = """MOCK VCF file""" - datafile = self._dir.write('data.vcf', data.encode('utf-8')) + datafile = self._dir.write("data.vcf", data.encode("utf-8")) await 
init_beacon_db([datafile, metafile]) - mock_log.info.mock_calls = ['Mark the database connection to be closed', - 'The database connection has been closed'] + mock_log.info.mock_calls = ["Mark the database connection to be closed", "The database connection has been closed"] - @asynctest.mock.patch('beacon_api.utils.db_load.init_beacon_db') + @asynctest.mock.patch("beacon_api.utils.db_load.init_beacon_db") def test_main_db(self, mock_init): """Test run asyncio main beacon init.""" main() @@ -126,18 +123,18 @@ def test_main_db(self, mock_init): def test_aud_claim(self): """Test aud claim function.""" env = EnvironmentVarGuard() - env.set('JWT_AUD', "aud1,aud2") + env.set("JWT_AUD", "aud1,aud2") result = verify_aud_claim() # Because it is false we expect it not to be parsed expected = (False, []) self.assertEqual(result, expected) - env.unset('JWT_AUD') + env.unset("JWT_AUD") def test_token_scheme_check_bad(self): """Test token scheme no token.""" # This might never happen, yet lets prepare for it with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): - token_scheme_check("", 'https', {}, 'localhost') + token_scheme_check("", "https", {}, "localhost") def test_access_resolution_base(self): """Test assumptions for access resolution. @@ -146,9 +143,9 @@ def test_access_resolution_base(self): """ request = PARAMS token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC']) + self.assertListEqual(result[0], ["PUBLIC"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2"], intermediate_list) @@ -160,9 +157,9 @@ def test_access_resolution_no_controlled(self): """ request = PARAMS token = mock_token(False, [], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC']) + self.assertListEqual(result[0], ["PUBLIC"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2"], intermediate_list) @@ -174,9 +171,9 @@ def test_access_resolution_registered(self): """ request = PARAMS token = mock_token(True, [], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC', 'REGISTERED']) + self.assertListEqual(result[0], ["PUBLIC", "REGISTERED"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2", "3", "4"], intermediate_list) @@ -188,9 +185,9 @@ def test_access_resolution_controlled_no_registered(self): """ request = PARAMS token = mock_token(False, ["5", "6"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC', 'CONTROLLED']) + self.assertListEqual(result[0], ["PUBLIC", "CONTROLLED"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2", "5", "6"], intermediate_list) @@ -202,9 +199,9 @@ def test_access_resolution_controlled_registered(self): """ request = PARAMS token = mock_token(True, ["5", "6"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1", "2"], ["3", "4"], ["5", "6"]) - self.assertListEqual(result[0], ['PUBLIC', 'REGISTERED', 'CONTROLLED']) + self.assertListEqual(result[0], ["PUBLIC", "REGISTERED", 
"CONTROLLED"]) intermediate_list = result[1] intermediate_list.sort() self.assertListEqual(["1", "2", "3", "4", "5", "6"], intermediate_list) @@ -216,7 +213,7 @@ def test_access_resolution_bad_registered(self): """ request = PARAMS token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): access_resolution(request, token, host, [], ["3"], []) @@ -227,7 +224,7 @@ def test_access_resolution_no_registered2(self): """ request = PARAMS token = mock_token(False, [], True) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden): access_resolution(request, token, host, [], ["4"], []) @@ -238,7 +235,7 @@ def test_access_resolution_controlled_forbidden(self): """ request = PARAMS token = mock_token(False, [7], True) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden): access_resolution(request, token, host, [], ["6"], []) @@ -249,7 +246,7 @@ def test_access_resolution_controlled_unauthorized(self): """ request = PARAMS token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): access_resolution(request, token, host, [], ["5"], []) @@ -260,9 +257,9 @@ def test_access_resolution_controlled_no_perms(self): """ request = PARAMS token = mock_token(False, ["7"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["2"], ["6"], []) - self.assertEqual(result, (['PUBLIC'], ["2"])) + self.assertEqual(result, (["PUBLIC"], ["2"])) def test_access_resolution_controlled_some(self): """Test assumptions for access resolution for requested controlled some datasets. @@ -271,9 +268,9 @@ def test_access_resolution_controlled_some(self): """ request = PARAMS token = mock_token(False, ["5"], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, [], [], ["5", "6"]) - self.assertEqual(result, (['CONTROLLED'], ["5"])) + self.assertEqual(result, (["CONTROLLED"], ["5"])) def test_access_resolution_controlled_no_perms_public(self): """Test assumptions for access resolution for requested controlled and public, returning public only. @@ -282,9 +279,9 @@ def test_access_resolution_controlled_no_perms_public(self): """ request = PARAMS token = mock_token(False, [], False) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, ["1"], [], ["5"]) - self.assertEqual(result, (['PUBLIC'], ["1"])) + self.assertEqual(result, (["PUBLIC"], ["1"])) def test_access_resolution_controlled_no_perms_bonafide(self): """Test assumptions for access resolution for requested controlled and registered, returning registered only. @@ -293,9 +290,9 @@ def test_access_resolution_controlled_no_perms_bonafide(self): """ request = PARAMS token = mock_token(True, [], True) - host = 'localhost' + host = "localhost" result = access_resolution(request, token, host, [], ["4"], ["7"]) - self.assertEqual(result, (['REGISTERED'], ["4"])) + self.assertEqual(result, (["REGISTERED"], ["4"])) def test_access_resolution_controlled_never_reached(self): """Test assumptions for access resolution for requested controlled unauthorized. 
@@ -305,7 +302,7 @@ def test_access_resolution_controlled_never_reached(self): """ request = PARAMS token = mock_token(False, None, False) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPUnauthorized): access_resolution(request, token, host, [], [], ["8"]) @@ -317,57 +314,80 @@ def test_access_resolution_controlled_never_reached2(self): """ request = PARAMS token = mock_token(False, None, True) - host = 'localhost' + host = "localhost" with self.assertRaises(aiohttp.web_exceptions.HTTPForbidden): access_resolution(request, token, host, [], [], ["8"]) - @asynctest.mock.patch('beacon_api.permissions.ga4gh.validate_passport') + @asynctest.mock.patch("beacon_api.permissions.ga4gh.validate_passport") async def test_ga4gh_controlled(self, m_validation): """Test ga4gh permissions claim parsing.""" # Test: no passports, no permissions datasets = await get_ga4gh_controlled([]) self.assertEqual(datasets, set()) # Test: 1 passport, 1 unique dataset, 1 permission - passport = {"ga4gh_visa_v1": {"type": "ControlledAccessGrants", - "value": "https://institution.org/EGAD01", - "source": "https://ga4gh.org/duri/no_org", - "by": "self", - "asserted": 1539069213, - "expires": 4694742813}} + passport = { + "ga4gh_visa_v1": { + "type": "ControlledAccessGrants", + "value": "https://institution.org/EGAD01", + "source": "https://ga4gh.org/duri/no_org", + "by": "self", + "asserted": 1539069213, + "expires": 4694742813, + } + } m_validation.return_value = passport dataset = await get_ga4gh_controlled([{}]) # one passport - self.assertEqual(dataset, {'EGAD01'}) + self.assertEqual(dataset, {"EGAD01"}) # Test: 2 passports, 1 unique dataset, 1 permission (permissions must not be duplicated) - passport = {"ga4gh_visa_v1": {"type": "ControlledAccessGrants", - "value": "https://institution.org/EGAD01", - "source": "https://ga4gh.org/duri/no_org", - "by": "self", - "asserted": 1539069213, - "expires": 4694742813}} + passport = { + "ga4gh_visa_v1": { + "type": "ControlledAccessGrants", + "value": "https://institution.org/EGAD01", + "source": "https://ga4gh.org/duri/no_org", + "by": "self", + "asserted": 1539069213, + "expires": 4694742813, + } + } m_validation.return_value = passport dataset = await get_ga4gh_controlled([{}, {}]) # two passports - self.assertEqual(dataset, {'EGAD01'}) + self.assertEqual(dataset, {"EGAD01"}) # Test: 2 passports, 2 unique datasets, 2 permissions # Can't test this case with the current design! 
# Would need a way for validate_passport() to mock two different results async def test_ga4gh_bona_fide(self): """Test ga4gh statuses claim parsing.""" - passports = [("enc", "header", { - "ga4gh_visa_v1": {"type": "AcceptedTermsAndPolicies", - "value": "https://doi.org/10.1038/s41431-018-0219-y", - "source": "https://ga4gh.org/duri/no_org", - "by": "self", - "asserted": 1539069213, - "expires": 4694742813} - }), - ("enc", "header", { - "ga4gh_visa_v1": {"type": "ResearcherStatus", - "value": "https://doi.org/10.1038/s41431-018-0219-y", - "source": "https://ga4gh.org/duri/no_org", - "by": "peer", - "asserted": 1539017776, - "expires": 1593165413}})] + passports = [ + ( + "enc", + "header", + { + "ga4gh_visa_v1": { + "type": "AcceptedTermsAndPolicies", + "value": "https://doi.org/10.1038/s41431-018-0219-y", + "source": "https://ga4gh.org/duri/no_org", + "by": "self", + "asserted": 1539069213, + "expires": 4694742813, + } + }, + ), + ( + "enc", + "header", + { + "ga4gh_visa_v1": { + "type": "ResearcherStatus", + "value": "https://doi.org/10.1038/s41431-018-0219-y", + "source": "https://ga4gh.org/duri/no_org", + "by": "peer", + "asserted": 1539017776, + "expires": 1593165413, + } + }, + ), + ] # Good test: both required passport types contained the correct value bona_fide_status = await get_ga4gh_bona_fide(passports) self.assertEqual(bona_fide_status, True) # has bona fide @@ -376,12 +396,12 @@ async def test_ga4gh_bona_fide(self): bona_fide_status = await get_ga4gh_bona_fide(passports_empty) self.assertEqual(bona_fide_status, False) # doesn't have bona fide - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_jwk') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.jwt') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.LOG') + @asynctest.mock.patch("beacon_api.permissions.ga4gh.get_jwk") + @asynctest.mock.patch("beacon_api.permissions.ga4gh.jwt") + @asynctest.mock.patch("beacon_api.permissions.ga4gh.LOG") async def test_validate_passport(self, mock_log, m_jwt, m_jwk): """Test passport validation.""" - m_jwk.return_value = 'jwk' + m_jwk.return_value = "jwk" # Test: validation passed m_jwt.return_value = MockDecodedPassport() await validate_passport({}) @@ -394,7 +414,7 @@ async def test_validate_passport(self, mock_log, m_jwt, m_jwk): # need to assert the log called mock_log.error.assert_called_with("Something went wrong when processing JWT tokens: 1") - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_ga4gh_permissions') + @asynctest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_permissions") async def test_check_ga4gh_token(self, m_get_perms): """Test token scopes.""" # Test: no scope found @@ -403,28 +423,28 @@ async def test_check_ga4gh_token(self, m_get_perms): self.assertEqual(dataset_permissions, set()) self.assertEqual(bona_fide_status, False) # Test: scope is ok, but no claims - decoded_data = {'scope': ''} + decoded_data = {"scope": ""} dataset_permissions, bona_fide_status = await check_ga4gh_token(decoded_data, {}, False, set()) self.assertEqual(dataset_permissions, set()) self.assertEqual(bona_fide_status, False) # Test: scope is ok, claims are ok - m_get_perms.return_value = {'EGAD01'}, True - decoded_data = {'scope': 'openid ga4gh_passport_v1'} + m_get_perms.return_value = {"EGAD01"}, True + decoded_data = {"scope": "openid ga4gh_passport_v1"} dataset_permissions, bona_fide_status = await check_ga4gh_token(decoded_data, {}, False, set()) - self.assertEqual(dataset_permissions, {'EGAD01'}) + self.assertEqual(dataset_permissions, {"EGAD01"}) 
self.assertEqual(bona_fide_status, True) async def test_decode_passport(self): """Test key-less JWT decoding.""" - token, _ = generate_token('http://test.csc.fi') + token, _ = generate_token("http://test.csc.fi") header, payload = await decode_passport(token) - self.assertEqual(header.get('alg'), 'HS256') - self.assertEqual(payload.get('iss'), 'http://test.csc.fi') + self.assertEqual(header.get("alg"), "HS256") + self.assertEqual(payload.get("iss"), "http://test.csc.fi") - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_ga4gh_bona_fide') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.get_ga4gh_controlled') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.decode_passport') - @asynctest.mock.patch('beacon_api.permissions.ga4gh.retrieve_user_data') + @asynctest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_bona_fide") + @asynctest.mock.patch("beacon_api.permissions.ga4gh.get_ga4gh_controlled") + @asynctest.mock.patch("beacon_api.permissions.ga4gh.decode_passport") + @asynctest.mock.patch("beacon_api.permissions.ga4gh.retrieve_user_data") async def test_get_ga4gh_permissions(self, m_userinfo, m_decode, m_controlled, m_bonafide): """Test GA4GH permissions main function.""" # Test: no data (nothing) @@ -440,25 +460,17 @@ async def test_get_ga4gh_permissions(self, m_userinfo, m_decode, m_controlled, m # Test: permissions m_userinfo.return_value = [{}] header = {} - payload = { - 'ga4gh_visa_v1': { - 'type': 'ControlledAccessGrants' - } - } + payload = {"ga4gh_visa_v1": {"type": "ControlledAccessGrants"}} m_decode.return_value = header, payload - m_controlled.return_value = {'EGAD01'} + m_controlled.return_value = {"EGAD01"} m_bonafide.return_value = False dataset_permissions, bona_fide_status = await get_ga4gh_permissions({}) - self.assertEqual(dataset_permissions, {'EGAD01'}) + self.assertEqual(dataset_permissions, {"EGAD01"}) self.assertEqual(bona_fide_status, False) # Test: bona fide m_userinfo.return_value = [{}] header = {} - payload = { - 'ga4gh_visa_v1': { - 'type': 'ResearcherStatus' - } - } + payload = {"ga4gh_visa_v1": {"type": "ResearcherStatus"}} m_decode.return_value = header, payload m_controlled.return_value = set() m_bonafide.return_value = True @@ -467,5 +479,5 @@ async def test_get_ga4gh_permissions(self, m_userinfo, m_decode, m_controlled, m self.assertEqual(bona_fide_status, True) -if __name__ == '__main__': +if __name__ == "__main__": asynctest.main() diff --git a/tests/test_data_query.py b/tests/test_data_query.py index 587f0e90..1e5cc705 100644 --- a/tests/test_data_query.py +++ b/tests/test_data_query.py @@ -16,9 +16,19 @@ class Record: Mimic asyncpg Record object. 
""" - def __init__(self, accessType, frequency=None, createDateTime=None, updateDateTime=None, - referenceBases=None, alternateBases=None, start=None, end=None, - variantCount=0, variantType=None): + def __init__( + self, + accessType, + frequency=None, + createDateTime=None, + updateDateTime=None, + referenceBases=None, + alternateBases=None, + start=None, + end=None, + variantCount=0, + variantType=None, + ): """Initialise things.""" self.data = {"accessType": accessType} # self.variantCount = variantCount @@ -87,29 +97,46 @@ def test_filter_exists(self): def test_transform_record(self): """Test transform DB record.""" - response = {"frequency": 0.009112876, "info": {"accessType": "PUBLIC"}, - "referenceBases": "CT", "alternateBases": "AT", - "start": 10, "end": 12, - "variantCount": 3, "variantType": "MNP"} + response = { + "frequency": 0.009112876, + "info": {"accessType": "PUBLIC"}, + "referenceBases": "CT", + "alternateBases": "AT", + "start": 10, + "end": 12, + "variantCount": 3, + "variantType": "MNP", + } record = Record("PUBLIC", 0.009112875989879, referenceBases="CT", alternateBases="AT", start=10, end=12, variantCount=3, variantType="MNP") result = transform_record(record) self.assertEqual(result, response) def test_transform_misses(self): """Test transform misses record.""" - response = {"referenceBases": '', "alternateBases": '', "variantType": "", - "frequency": 0, "callCount": 0, "sampleCount": 0, "variantCount": 0, - "start": 0, "end": 0, "info": {"accessType": "PUBLIC"}} + response = { + "referenceBases": "", + "alternateBases": "", + "variantType": "", + "frequency": 0, + "callCount": 0, + "sampleCount": 0, + "variantCount": 0, + "start": 0, + "end": 0, + "info": {"accessType": "PUBLIC"}, + } record = Record("PUBLIC") result = transform_misses(record) self.assertEqual(result, response) def test_transform_metadata(self): """Test transform medata record.""" - response = {"createDateTime": "2018-10-20T20:33:40Z", "updateDateTime": "2018-10-20T20:33:40Z", - "info": {"accessType": "PUBLIC"}} - record = Record("PUBLIC", createDateTime=datetime.strptime("2018-10-20 20:33:40+00", '%Y-%m-%d %H:%M:%S+00'), - updateDateTime=datetime.strptime("2018-10-20 20:33:40+00", '%Y-%m-%d %H:%M:%S+00')) + response = {"createDateTime": "2018-10-20T20:33:40Z", "updateDateTime": "2018-10-20T20:33:40Z", "info": {"accessType": "PUBLIC"}} + record = Record( + "PUBLIC", + createDateTime=datetime.strptime("2018-10-20 20:33:40+00", "%Y-%m-%d %H:%M:%S+00"), + updateDateTime=datetime.strptime("2018-10-20 20:33:40+00", "%Y-%m-%d %H:%M:%S+00"), + ) result = transform_metadata(record) self.assertEqual(result, response) @@ -117,43 +144,42 @@ def test_add_handover(self): """Test that add handover.""" # Test that the handover actually is added handovers = [{"handover1": "info"}, {"handover2": "url"}] - record = {"datasetId": "test", "referenceName": "22", "referenceBases": "A", - "alternateBases": "C", "start": 10, "end": 11, "variantType": "SNP"} - with mock.patch('beacon_api.extensions.handover.make_handover', return_value=handovers): + record = {"datasetId": "test", "referenceName": "22", "referenceBases": "A", "alternateBases": "C", "start": 10, "end": 11, "variantType": "SNP"} + with mock.patch("beacon_api.extensions.handover.make_handover", return_value=handovers): result = add_handover(record) - record['datasetHandover'] = handovers + record["datasetHandover"] = handovers self.assertEqual(result, record) def test_make_handover(self): """Test make handover.""" - paths = [('lab1', 'desc1', 'path1'), 
('lab2', 'desc2', 'path2')] - result = make_handover(paths, ['id1', 'id2', 'id1']) + paths = [("lab1", "desc1", "path1"), ("lab2", "desc2", "path2")] + result = make_handover(paths, ["id1", "id2", "id1"]) # The number of handovers = number of paths * number of unique datasets self.assertEqual(len(result), 4) self.assertIn("path1", result[0]["url"]) - self.assertEqual(result[0]["description"], 'desc1') + self.assertEqual(result[0]["description"], "desc1") - @asynctest.mock.patch('beacon_api.utils.data_query.fetch_filtered_dataset') + @asynctest.mock.patch("beacon_api.utils.data_query.fetch_filtered_dataset") async def test_find_datasets(self, mock_filtered): """Test find datasets.""" mock_filtered.return_value = [] token = dict() token["bona_fide_status"] = False - result = await find_datasets(None, 'GRCh38', None, 'Y', 'T', 'C', [], token, "NONE") + result = await find_datasets(None, "GRCh38", None, "Y", "T", "C", [], token, "NONE") self.assertEqual(result, []) # setting ALL should cover MISS call as well - result_all = await find_datasets(None, 'GRCh38', None, 'Y', 'T', 'C', [], token, "ALL") + result_all = await find_datasets(None, "GRCh38", None, "Y", "T", "C", [], token, "ALL") self.assertEqual(result_all, []) async def test_datasets_access_call_public(self): """Test db call of getting public datasets access.""" pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'PUBLIC', 'datasetid': 'mock:public:id'}]) + pool.acquire().__aenter__.return_value = Connection(accessData=[{"accesstype": "PUBLIC", "datasetid": "mock:public:id"}]) result = await fetch_datasets_access(pool, None) # for now it can return a tuple of empty datasets # in order to get a response we will have to mock it # in Connection() class - self.assertEqual(result, (['mock:public:id'], [], [])) + self.assertEqual(result, (["mock:public:id"], [], [])) async def test_datasets_access_call_exception(self): """Test db call of getting public datasets access with exception.""" @@ -165,33 +191,34 @@ async def test_datasets_access_call_exception(self): async def test_datasets_access_call_registered(self): """Test db call of getting registered datasets access.""" pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'REGISTERED', 'datasetid': 'mock:registered:id'}]) + pool.acquire().__aenter__.return_value = Connection(accessData=[{"accesstype": "REGISTERED", "datasetid": "mock:registered:id"}]) result = await fetch_datasets_access(pool, None) # for now it can return a tuple of empty datasets # in order to get a response we will have to mock it # in Connection() class - self.assertEqual(result, ([], ['mock:registered:id'], [])) + self.assertEqual(result, ([], ["mock:registered:id"], [])) async def test_datasets_access_call_controlled(self): """Test db call of getting controlled datasets access.""" pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'CONTROLLED', 'datasetid': 'mock:controlled:id'}]) + pool.acquire().__aenter__.return_value = Connection(accessData=[{"accesstype": "CONTROLLED", "datasetid": "mock:controlled:id"}]) result = await fetch_datasets_access(pool, None) # for now it can return a tuple of empty datasets # in order to get a response we will have to mock it # in Connection() class - self.assertEqual(result, ([], [], ['mock:controlled:id'])) + self.assertEqual(result, ([], [], ["mock:controlled:id"])) async def 
test_datasets_access_call_multiple(self): """Test db call of getting controlled and public datasets access.""" pool = asynctest.CoroutineMock() - pool.acquire().__aenter__.return_value = Connection(accessData=[{'accesstype': 'CONTROLLED', 'datasetid': 'mock:controlled:id'}, - {'accesstype': 'PUBLIC', 'datasetid': 'mock:public:id'}]) + pool.acquire().__aenter__.return_value = Connection( + accessData=[{"accesstype": "CONTROLLED", "datasetid": "mock:controlled:id"}, {"accesstype": "PUBLIC", "datasetid": "mock:public:id"}] + ) result = await fetch_datasets_access(pool, None) # for now it can return a tuple of empty datasets # in order to get a response we will have to mock it # in Connection() class - self.assertEqual(result, (['mock:public:id'], [], ['mock:controlled:id'])) + self.assertEqual(result, (["mock:public:id"], [], ["mock:controlled:id"])) async def test_fetch_dataset_metadata_call(self): """Test db call of getting datasets metadata.""" @@ -213,32 +240,61 @@ async def test_fetch_dataset_metadata_call_exception(self): async def test_fetch_filtered_dataset_call(self): """Test db call for retrieving main data.""" pool = asynctest.CoroutineMock() - db_response = {"referenceBases": '', "alternateBases": '', "variantType": "", - "referenceName": 'Chr38', - "frequency": 0, "callCount": 0, "sampleCount": 0, "variantCount": 0, - "start": 0, "end": 0, "accessType": "PUBLIC", "datasetId": "test"} + db_response = { + "referenceBases": "", + "alternateBases": "", + "variantType": "", + "referenceName": "Chr38", + "frequency": 0, + "callCount": 0, + "sampleCount": 0, + "variantCount": 0, + "start": 0, + "end": 0, + "accessType": "PUBLIC", + "datasetId": "test", + } pool.acquire().__aenter__.return_value = Connection(accessData=[db_response]) - assembly_id = 'GRCh38' + assembly_id = "GRCh38" position = (10, 20, None, None, None, None) chromosome = 1 - reference = 'A' - alternate = ('DUP', None) + reference = "A" + alternate = ("DUP", None) result = await fetch_filtered_dataset(pool, assembly_id, position, chromosome, reference, alternate, None, None, False) # for now it can return empty dataset # in order to get a response we will have to mock it # in Connection() class - expected = {'referenceName': 'Chr38', 'callCount': 0, 'sampleCount': 0, 'variantCount': 0, 'datasetId': 'test', - 'referenceBases': '', 'alternateBases': '', 'variantType': '', 'start': 0, 'end': 0, 'frequency': 0, - 'info': {'accessType': 'PUBLIC'}, - 'datasetHandover': [{'handoverType': {'id': 'CUSTOM', 'label': 'Variants'}, - 'description': 'browse the variants matched by the query', - 'url': 'https://examplebrowser.org/dataset/test/browser/variant/Chr38-1--'}, - {'handoverType': {'id': 'CUSTOM', 'label': 'Region'}, - 'description': 'browse data of the region matched by the query', - 'url': 'https://examplebrowser.org/dataset/test/browser/region/Chr38-1-1'}, - {'handoverType': {'id': 'CUSTOM', 'label': 'Data'}, - 'description': 'retrieve information of the datasets', - 'url': 'https://examplebrowser.org/dataset/test/browser'}]} + expected = { + "referenceName": "Chr38", + "callCount": 0, + "sampleCount": 0, + "variantCount": 0, + "datasetId": "test", + "referenceBases": "", + "alternateBases": "", + "variantType": "", + "start": 0, + "end": 0, + "frequency": 0, + "info": {"accessType": "PUBLIC"}, + "datasetHandover": [ + { + "handoverType": {"id": "CUSTOM", "label": "Variants"}, + "description": "browse the variants matched by the query", + "url": "https://examplebrowser.org/dataset/test/browser/variant/Chr38-1--", + }, + { + 
"handoverType": {"id": "CUSTOM", "label": "Region"}, + "description": "browse data of the region matched by the query", + "url": "https://examplebrowser.org/dataset/test/browser/region/Chr38-1-1", + }, + { + "handoverType": {"id": "CUSTOM", "label": "Data"}, + "description": "retrieve information of the datasets", + "url": "https://examplebrowser.org/dataset/test/browser", + }, + ], + } self.assertEqual(result, [expected]) @@ -246,21 +302,21 @@ async def test_fetch_filtered_dataset_call_misses(self): """Test db call for retrieving miss data.""" pool = asynctest.CoroutineMock() pool.acquire().__aenter__.return_value = Connection() # db_response is [] - assembly_id = 'GRCh38' + assembly_id = "GRCh38" position = (10, 20, None, None, None, None) chromosome = 1 - reference = 'A' - alternate = ('DUP', None) + reference = "A" + alternate = ("DUP", None) result_miss = await fetch_filtered_dataset(pool, assembly_id, position, chromosome, reference, alternate, None, None, True) self.assertEqual(result_miss, []) async def test_fetch_filtered_dataset_call_exception(self): """Test db call of retrieving main data with exception.""" - assembly_id = 'GRCh38' + assembly_id = "GRCh38" position = (10, 20, None, None, None, None) chromosome = 1 - reference = 'A' - alternate = ('DUP', None) + reference = "A" + alternate = ("DUP", None) pool = asynctest.CoroutineMock() pool.acquire().__aenter__.return_value = ConnectionException() with self.assertRaises(aiohttp.web_exceptions.HTTPInternalServerError): @@ -268,13 +324,13 @@ async def test_fetch_filtered_dataset_call_exception(self): def test_handle_wildcard(self): """Test PostgreSQL wildcard handling.""" - sequence1 = 'ATCG' - sequence2 = 'ATNG' - sequence3 = 'NNCN' - self.assertEqual(handle_wildcard(sequence1), ['ATCG']) + sequence1 = "ATCG" + sequence2 = "ATNG" + sequence3 = "NNCN" + self.assertEqual(handle_wildcard(sequence1), ["ATCG"]) self.assertEqual(handle_wildcard(sequence2), ["%AT_G%"]) self.assertEqual(handle_wildcard(sequence3), ["%__C_%"]) -if __name__ == '__main__': +if __name__ == "__main__": asynctest.main() diff --git a/tests/test_db_load.py b/tests/test_db_load.py index 456e2dd6..960bda29 100644 --- a/tests/test_db_load.py +++ b/tests/test_db_load.py @@ -177,23 +177,23 @@ def setUp(self): 19 112 . A G 10 . . GT:HQ 0|0:10,10 0|0:10,10 0/1:3,3 20 14370 rs6054257 G A 29 PASS NS=3;DP=14;AF=0.5;DB;H2 GT:GQ:DP:HQ 0|0:48:1:51,51 1|0:48:8:51,51 1/1:43:5:.,. chrM 15011 . T C . PASS . 
GT:GQ:DP:RO:QR:AO:QA:GL 1:160:970:0:0:968:31792:-2860.58,0 1:160:970:0:0:968:31792:-2860.58,0""" - self.datafile = self._dir.write('data.csv', self.data.encode('utf-8')) + self.datafile = self._dir.write("data.csv", self.data.encode("utf-8")) def tearDown(self): """Close database connection after tests.""" self._dir.cleanup_all() - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_rchop(self, db_mock): """Test rchop for SVTYPE.""" db_mock.return_value = Connection() await self._db.connection() - result = self._db._rchop('INS:ME:LINE1', ":LINE1") - self.assertEqual('INS:ME', result) - result_no_ending = self._db._rchop('INS', ":LINE1") - self.assertEqual('INS', result_no_ending) + result = self._db._rchop("INS:ME:LINE1", ":LINE1") + self.assertEqual("INS:ME", result) + result_no_ending = self._db._rchop("INS", ":LINE1") + self.assertEqual("INS", result_no_ending) - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_handle_type(self, db_mock): """Test handle type.""" db_mock.return_value = Connection() @@ -203,32 +203,32 @@ async def test_handle_type(self, db_mock): result_tuple = self._db._handle_type((0.1, 0.2), float) self.assertEqual([0.1, 0.2], result_tuple) - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_bnd_parts(self, db_mock): """Test breakend parsing parts.""" db_mock.return_value = Connection() await self._db.connection() - result = self._db._bnd_parts('[CHR17:31356925[N', '126_2') - self.assertEqual(('chr17', 31356925, True, True, 'N', True, '126_2'), result) + result = self._db._bnd_parts("[CHR17:31356925[N", "126_2") + self.assertEqual(("chr17", 31356925, True, True, "N", True, "126_2"), result) - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg') + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg") async def test_connection(self, db_mock): """Test database URL fetching.""" await self._db.connection() db_mock.connect.assert_called() - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_check_tables(self, db_mock): """Test checking tables.""" db_mock.return_value = Connection() await self._db.connection() db_mock.assert_called() - result = await self._db.check_tables(['DATATSET1', 'DATATSET2']) + result = await self._db.check_tables(["DATATSET1", "DATATSET2"]) # No Missing tables self.assertEqual(result, []) - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.LOG") + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_create_tables(self, db_mock, mock_log): """Test creating tables.""" sql = """CREATE TABLE IF NOT EXISTS beacon_data_table ( @@ -238,24 +238,24 @@ async def test_create_tables(self, db_mock, mock_log): db_mock.return_value = Connection() await self._db.connection() db_mock.assert_called() - sql_file = self._dir.write('sql.init', sql.encode('utf-8')) + sql_file = self._dir.write("sql.init", sql.encode("utf-8")) await self._db.create_tables(sql_file) # Should assert logs - mock_log.info.assert_called_with('Tables have been created') + mock_log.info.assert_called_with("Tables have been created") - 
@asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.LOG") + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_create_tables_exception(self, db_mock, mock_log): """Test creating tables exception.""" db_mock.return_value = ConnectionException() await self._db.connection() - await self._db.create_tables('sql.init') + await self._db.create_tables("sql.init") log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CREATE TABLES -> [Errno 2] No such file or directory: 'sql.init'" mock_log.error.assert_called_with(log) - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') - @asynctest.mock.patch('beacon_api.utils.db_load.VCF') + @asynctest.mock.patch("beacon_api.utils.db_load.LOG") + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") + @asynctest.mock.patch("beacon_api.utils.db_load.VCF") async def test_load_metadata(self, mock_vcf, db_mock, mock_log): """Test load metadata.""" metadata = """{"name": "ALL.chrMT.phase3_callmom-v0_4.20130502.genotypes.vcf", @@ -270,65 +270,61 @@ async def test_load_metadata(self, mock_vcf, db_mock, mock_log): db_mock.return_value = Connection() await self._db.connection() db_mock.assert_called() - metafile = self._dir.write('data.json', metadata.encode('utf-8')) - vcf = asynctest.mock.MagicMock(name='samples') + metafile = self._dir.write("data.json", metadata.encode("utf-8")) + vcf = asynctest.mock.MagicMock(name="samples") vcf.samples.return_value = [1, 2, 3] await self._db.load_metadata(vcf, metafile, self.datafile) # Should assert logs - mock_log.info.mock_calls = [f'Parsing metadata from {metafile}', - 'Metadata has been parsed'] + mock_log.info.mock_calls = [f"Parsing metadata from {metafile}", "Metadata has been parsed"] - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.LOG") + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_load_metadata_exception(self, db_mock, mock_log): """Test load metadata error.""" db_mock.return_value = ConnectionException() await self._db.connection() - vcf = asynctest.mock.MagicMock(name='samples') + vcf = asynctest.mock.MagicMock(name="samples") vcf.samples.return_value = [1, 2, 3] - await self._db.load_metadata(vcf, 'meta.are', 'datafile') + await self._db.load_metadata(vcf, "meta.are", "datafile") log = "AN ERROR OCCURRED WHILE ATTEMPTING TO PARSE METADATA -> [Errno 2] No such file or directory: 'meta.are'" mock_log.error.assert_called_with(log) - @asynctest.mock.patch('beacon_api.utils.db_load.LOG') - @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect') + @asynctest.mock.patch("beacon_api.utils.db_load.LOG") + @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect") async def test_load_datafile(self, db_mock, mock_log): """Test load_datafile.""" db_mock.return_value = Connection() - vcf = asynctest.mock.MagicMock(name='samples') - vcf.return_value = [{'record': 1}, {'record': 2}, {'records': 3}] - vcf.samples.return_value = [{'record': 1}, {'record': 2}, {'records': 3}] + vcf = asynctest.mock.MagicMock(name="samples") + vcf.return_value = [{"record": 1}, {"record": 2}, {"records": 3}] + vcf.samples.return_value = [{"record": 1}, {"record": 2}, {"records": 3}] await self._db.connection() db_mock.assert_called() - await 
self._db.load_datafile(vcf, self.datafile, 'DATASET1')
+        await self._db.load_datafile(vcf, self.datafile, "DATASET1")
         # Should assert logs
-        mock_log.info.mock_calls = [f'Read data from {self.datafile}',
-                                    f'{self.datafile} has been processed']
+        mock_log.info.mock_calls = [f"Read data from {self.datafile}", f"{self.datafile} has been processed"]
 
-    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
-    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    @asynctest.mock.patch("beacon_api.utils.db_load.LOG")
+    @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
     async def test_insert_variants(self, db_mock, mock_log):
         """Test insert variants."""
         db_mock.return_value = Connection()
         await self._db.connection()
         db_mock.assert_called()
-        await self._db.insert_variants('DATASET1', ['C'], 1)
+        await self._db.insert_variants("DATASET1", ["C"], 1)
         # Should assert logs
-        mock_log.info.mock_calls = ['Received 1 variants for insertion to DATASET1',
-                                    'Insert variants into the database']
+        mock_log.info.mock_calls = ["Received 1 variants for insertion to DATASET1", "Insert variants into the database"]
 
-    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
-    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    @asynctest.mock.patch("beacon_api.utils.db_load.LOG")
+    @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
     async def test_close(self, db_mock, mock_log):
         """Test database connection close."""
         db_mock.return_value = Connection()
         await self._db.connection()
         await self._db.close()
-        mock_log.info.mock_calls = ['Mark the database connection to be closed',
-                                    'The database connection has been closed']
+        mock_log.info.mock_calls = ["Mark the database connection to be closed", "The database connection has been closed"]
 
-    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
-    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    @asynctest.mock.patch("beacon_api.utils.db_load.LOG")
+    @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
     async def test_close_error(self, db_mock, mock_log):
         """Test database connection close error."""
         db_mock.return_value = ConnectionException()
         await self._db.connection()
         await self._db.close()
         log = "AN ERROR OCCURRED WHILE ATTEMPTING TO CLOSE DATABASE CONNECTION -> 'ConnectionException' object has no attribute 'close'"
         mock_log.error.assert_called_with(log)
 
-    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
-    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    @asynctest.mock.patch("beacon_api.utils.db_load.LOG")
+    @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
     async def test_unpack(self, db_mock, mock_log):
         """Test unpacking of variant records."""
         db_mock.return_value = Connection()
         await self._db.connection()
-        inf1 = INFO((1), 'i', 3, None)
-        variant_1 = Variant(['C'], 'T', inf1, 0.7, 'indel', 3)
+        inf1 = INFO((1), "i", 3, None)
+        variant_1 = Variant(["C"], "T", inf1, 0.7, "indel", 3)
         result = self._db._unpack(variant_1)
-        self.assertEqual(([0.3333333333333333], [1], ['SNP'], ['C'], 3, []), result)
-        inf2 = INFO(1, 'M', 3, None)
-        variant_2 = Variant(['AT', 'A'], 'ATA', inf2, 0.7, 'mnp', 3)
+        self.assertEqual(([0.3333333333333333], [1], ["SNP"], ["C"], 3, []), result)
+        inf2 = INFO(1, "M", 3, None)
+        variant_2 = Variant(["AT", "A"], "ATA", inf2, 0.7, "mnp", 3)
         result = self._db._unpack(variant_2)
-        self.assertEqual(([0.3333333333333333], [1], ['DEL', 'DEL'], ['AT', 'A'], 3, []), result)
-        inf3 = INFO((1), 'S', 3, 0.5)
-        variant_3 = Variant(['TC'], 'T', inf3, 0.7, 'snp', 3)
+        self.assertEqual(([0.3333333333333333], [1], ["DEL", "DEL"], ["AT", "A"], 3, []), result)
+        inf3 = INFO((1), "S", 3, 0.5)
+        variant_3 = Variant(["TC"], "T", inf3, 0.7, "snp", 3)
         result = self._db._unpack(variant_3)
-        self.assertEqual(([0.5], [1], ['INS'], ['TC'], 3, []), result)
-        inf4 = INFO((1), '', 3, None, 'BND')
-        variant_4 = Variant(['TC'], 'T', inf4, 0.7, 'snp', 3)
+        self.assertEqual(([0.5], [1], ["INS"], ["TC"], 3, []), result)
+        inf4 = INFO((1), "", 3, None, "BND")
+        variant_4 = Variant(["TC"], "T", inf4, 0.7, "snp", 3)
         result = self._db._unpack(variant_4)
-        self.assertEqual(([0.3333333333333333], [1], ['SNP'], ['TC'], 3, []), result)
-        inf5 = INFO((1), 'S', 3, None, '')
-        variant_5 = Variant(['TC'], 'T', inf5, 0.7, 'ins', 3)
+        self.assertEqual(([0.3333333333333333], [1], ["SNP"], ["TC"], 3, []), result)
+        inf5 = INFO((1), "S", 3, None, "")
+        variant_5 = Variant(["TC"], "T", inf5, 0.7, "ins", 3)
         result5 = self._db._unpack(variant_5)
-        self.assertEqual(([0.3333333333333333], [1], ['INS'], ['TC'], 3, []), result5)
+        self.assertEqual(([0.3333333333333333], [1], ["INS"], ["TC"], 3, []), result5)

-    @asynctest.mock.patch('beacon_api.utils.db_load.LOG')
-    @asynctest.mock.patch('beacon_api.utils.db_load.asyncpg.connect')
+    @asynctest.mock.patch("beacon_api.utils.db_load.LOG")
+    @asynctest.mock.patch("beacon_api.utils.db_load.asyncpg.connect")
     async def test_chunks(self, db_mock, mock_log):
         """Test database URL fetching."""
         db_mock.return_value = Connection()
@@ -378,5 +374,5 @@ async def test_chunks(self, db_mock, mock_log):
         self.assertEqual([[(1, 2)], [(2, 3)]], lines)


-if __name__ == '__main__':
+if __name__ == "__main__":
     unittest.main()
diff --git a/tox.ini b/tox.ini
index 85f64c53..5961f548 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,5 +1,5 @@
 [tox]
-envlist = py{36,38},flake8,docs,bandit,unit_tests,mypy
+envlist = py{36,38},flake8,docs,bandit,unit_tests,mypy,black
 skipsdist = True

 [flake8]
@@ -38,6 +38,12 @@ deps =
     types-ujson
 commands = mypy --ignore-missing-imports beacon_api/

+[testenv:black]
+skip_install = true
+deps =
+    black
+commands = black . -l 160 --check
+
 [testenv:unit_tests]
 setenv =
     CONFIG_FILE = {toxinidir}/tests/test.ini
@@ -51,4 +57,4 @@ commands = py.test -x --cov=beacon_api tests/ --cov-fail-under=80
 [gh-actions]
 python =
     3.6: unit_tests
-    3.8: flake8, unit_tests, docs, bandit, mypy
+    3.8: flake8, black, unit_tests, docs, bandit, mypy

From bf6975511afe5f36e87741e41c833af4ed2e84df Mon Sep 17 00:00:00 2001
From: Stefan Negru
Date: Mon, 3 Jan 2022 09:19:10 +0200
Subject: [PATCH 3/4] fix old 'genomoe' typo

---
 .wordlist.txt                                 |   1 -
 data/example_metadata.json                    |   2 +-
 deploy/test/example_metadata_controlled.json  |   2 +-
 deploy/test/example_metadata_controlled1.json |   2 +-
 deploy/test/example_metadata_registered.json  |   2 +-
 dictionary.dic                                | Bin 4960 -> 4944 bytes
 docs/example.rst                              |   2 +-
 docs/instructions.rst                         |   2 +-
 8 files changed, 6 insertions(+), 7 deletions(-)

diff --git a/.wordlist.txt b/.wordlist.txt
index 1cb362e4..4ce47fb9 100644
--- a/.wordlist.txt
+++ b/.wordlist.txt
@@ -61,7 +61,6 @@ FIDE
 finland
 ga
 genomic
-genomoe
 gh
 GH
 GHBeacon
diff --git a/data/example_metadata.json b/data/example_metadata.json
index 98d502e7..3a7880dc 100644
--- a/data/example_metadata.json
+++ b/data/example_metadata.json
@@ -1,5 +1,5 @@
 {
-    "name": "1000 genomoe",
+    "name": "1000 genome",
     "datasetId": "urn:hg:1000genome",
     "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.",
     "assemblyId": "GRCh38",
diff --git a/deploy/test/example_metadata_controlled.json b/deploy/test/example_metadata_controlled.json
index 0ffa5631..b231895d 100644
--- a/deploy/test/example_metadata_controlled.json
+++ b/deploy/test/example_metadata_controlled.json
@@ -1,5 +1,5 @@
 {
-    "name": "1000 genomoe",
+    "name": "1000 genome",
     "datasetId": "urn:hg:1000genome:controlled",
     "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.",
     "assemblyId": "GRCh38",
diff --git a/deploy/test/example_metadata_controlled1.json b/deploy/test/example_metadata_controlled1.json
index e2084975..1efbfc4b 100644
--- a/deploy/test/example_metadata_controlled1.json
+++ b/deploy/test/example_metadata_controlled1.json
@@ -1,5 +1,5 @@
 {
-    "name": "1000 genomoe",
+    "name": "1000 genome",
     "datasetId": "urn:hg:1000genome:controlled1",
     "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.",
     "assemblyId": "GRCh38",
diff --git a/deploy/test/example_metadata_registered.json b/deploy/test/example_metadata_registered.json
index 858a0e0a..3900bd3d 100644
--- a/deploy/test/example_metadata_registered.json
+++ b/deploy/test/example_metadata_registered.json
@@ -1,5 +1,5 @@
 {
-    "name": "1000 genomoe",
+    "name": "1000 genome",
     "datasetId": "urn:hg:1000genome:registered",
     "description": "Data from the 1000 Genomes Project. The 1000 Genomes Project ran between 2008 and 2015, creating the largest public catalogue of human variation and genotype data. As the project ended, the Data Coordination Centre at EMBL-EBI has received continued funding from the Wellcome Trust to maintain and expand the resource.",
     "assemblyId": "GRCh38",
diff --git a/dictionary.dic b/dictionary.dic
index 63cff1c64501f01b028f75969589eeede5bf2cc8..e20f793e9d9e6d8399cebc89dc0fc4a38beacb1f 100644
GIT binary patch
delta 1034
zcmYk4Ur1A77{;GXXEv~%DJISfA(=KD{Q>EsKgh+qgYVkin=H(
zavTWhp9V!x5!Eb$F72X>K#?e+mSA<~!0seuJ@2QrP}EeYSqIoj6CIj1x`gIF
zTogJAd7y3lEnhKAFy!o6^
zCGkr$g$?WFY@tolK^NI;Ch=LAO5%s6i51N>LP3cu#DunmU1Zh<&XE6BJ{~HJ1s1D)
zo7}U(Na0~{FyL!+Ta2iiFapiDIeXb#xhmi^-%Xs1x<`yA-;z6IboxHI_l$;~2u-7O
zmEv}OU|}7rh09b3WyM)@c#Z2-=EitI$^GGKLaQVR!K^
zGCs^+?rgXrR}RuI%1#mQZY57GmHph$Q@uo&0jiF&$FkQy!I;DRYd#KhlXTqg-E)#0
zV!n^>>>xE8nM-nS&}*9eZS*kNZx6OsMtZ>_X(+3o;&F--H14LLnqv=8@P-1vo3C#T
z++lU6`?9_@x0Aaf4MlgH8lkKY)IFtUJDXmnu0+oVe6F+JAuCn?e{b>v3Hr>j_7rQr
cuBrLS^LhHbqNgY(ynfu)mA^Cjg#*g^3+TiwzW@LL

delta 1050
zcmYk4T}TvR6o%hzce4i4b(aziQZLl}feb_<3PW%sW@ngYK5((F!mS8wNf5jU7AdGq
zP#c4a$PglmvP9~px{#3QPRO9LL@FW#F`=J}pzS@MLwR8K`Mz_`dwynTrh1`zD82b%
z9NK0=z(NEVDguTlx<qOr;lgdq7Oqes6l-8k*P5yWjNReF68p`MCGRiAw$>)Nnt1L8
z%6H5-5%|R4#E+tszCnLyaZT+UZ+T|6uX+`cdHM(0%bzMv5FVC4gEwiBLj#48Odr#dsHLYxVK^9}p$9!%wzmr_e
p`u}^43plLvin-^Rv$Q7XC-0Y7=LKtuVj}3r

Date: Mon, 3 Jan 2022 09:20:53 +0200
Subject: [PATCH 4/4] fix 'valited' typo

---
 .wordlist.txt         | 1 -
 docs/instructions.rst | 2 +-
 2 files changed, 1 insertion(+), 2 deletions(-)

diff --git a/.wordlist.txt b/.wordlist.txt
index 4ce47fb9..7f8497bc 100644
--- a/.wordlist.txt
+++ b/.wordlist.txt
@@ -134,7 +134,6 @@ updateDateTime
 uri
 url
 utils
-valited
 valueFrom
 variantCount
 varianttype
diff --git a/docs/instructions.rst b/docs/instructions.rst
index 282ecef0..23dbcc5d 100644
--- a/docs/instructions.rst
+++ b/docs/instructions.rst
@@ -101,7 +101,7 @@ in the supplied token. If disabled, the audience(s) of a token will not be valid

 Disabling this can be a good solution for standalone Beacons, that want to be
 able to use tokens generated by any authority. If ``verify_aud=True`` is set
-provide also value(s) for ``audience`` key, as otherwise the audience will be attempted to be valited, but as no audiences
+also provide value(s) for the ``audience`` key, as otherwise audience validation will be attempted, but as no audiences
 are listed, the validation will fail.

 .. note:: For implementing `CONTROLLED` dataset permissions see :ref:`permissions`.
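
For illustration, the audience behaviour described in the ``docs/instructions.rst`` hunk above could be configured along these lines. This is only a sketch: the ``[oauth2]`` section name and the comma-separated list format are assumptions about the deployment's configuration file; only the ``verify_aud`` and ``audience`` keys come from the documentation itself, and ``aud1``/``aud2`` are placeholder values::

    [oauth2]
    # Assumed section name; check the deployment's own config.ini.
    # With verify_aud=True and no audience values, every token would
    # fail validation, as the documentation above points out.
    verify_aud=True
    # Placeholder audiences expected in the token's "aud" claim
    # (exact list format depends on the implementation).
    audience=aud1,aud2

A standalone Beacon that accepts tokens from any authority would instead set ``verify_aud=False``, in which case the ``audience`` value is not checked.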