4 changes: 2 additions & 2 deletions docs/release.rst
@@ -9,8 +9,8 @@ Release notes
 Bug fixes
 ~~~~~~~~~
 
-* Coerce data to text for JSON parsing.
-  By :user:`John Kirkham <jakirkham>`; :issue:`429`
+* Add and use utility functions to simplify reading and writing JSON.
+  By :user:`John Kirkham <jakirkham>`; :issue:`429`, :issue:`430`
 
 
 .. _release_2.3.1:
6 changes: 2 additions & 4 deletions zarr/attrs.py
@@ -1,11 +1,11 @@
 # -*- coding: utf-8 -*-
 from __future__ import absolute_import, print_function, division
-import json
 from collections import MutableMapping
 
 
 from zarr.errors import PermissionError
 from zarr.meta import parse_metadata
+from zarr.util import json_dumps
 
 
 class Attributes(MutableMapping):
@@ -113,9 +113,7 @@ def put(self, d):
         self._write_op(self._put_nosync, d)
 
     def _put_nosync(self, d):
-        s = json.dumps(d, indent=4, sort_keys=True, ensure_ascii=True,
-                       separators=(',', ': '))
-        self.store[self.key] = s.encode('ascii')
+        self.store[self.key] = json_dumps(d)
        if self.cache:
            self._cached_asdict = d
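
For illustration only (not part of the diff): the refactored Attributes._put_nosync should write byte-for-byte the same value as before, because the new json_dumps helper applies the same options the old inline call used and performs the ASCII encode itself. A quick equivalence check, assuming zarr at this revision:

import json

from zarr.util import json_dumps  # helper introduced by this change

d = {'foo': 'bar', 'baz': 42}

# What _put_nosync used to store:
old = json.dumps(d, indent=4, sort_keys=True, ensure_ascii=True,
                 separators=(',', ': ')).encode('ascii')

# What it stores now:
new = json_dumps(d)

assert old == new
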
2 changes: 1 addition & 1 deletion zarr/convenience.py
@@ -1125,7 +1125,7 @@ def is_zarr_key(key):
             for key in store if is_zarr_key(key)
         }
     }
-    store[metadata_key] = json_dumps(out).encode()
+    store[metadata_key] = json_dumps(out)
     return open_consolidated(store, metadata_key=metadata_key)
 
 
32 changes: 4 additions & 28 deletions zarr/meta.py
@@ -1,39 +1,19 @@
 # -*- coding: utf-8 -*-
 from __future__ import absolute_import, print_function, division
-import json
 import base64
-import codecs
 
 
 import numpy as np
-from numcodecs.compat import ensure_contiguous_ndarray
 
 
-from zarr.compat import PY2, Mapping, text_type
+from zarr.compat import PY2, Mapping
 from zarr.errors import MetadataError
+from zarr.util import json_dumps, json_loads
 
 
 ZARR_FORMAT = 2
 
 
-def ensure_text_type(s):
-    if not isinstance(s, text_type):
-        s = ensure_contiguous_ndarray(s)
-        s = codecs.decode(s, 'ascii')
-    return s
-
-
-def json_dumps(o):
-    """Write JSON in a consistent, human-readable way."""
-    return json.dumps(o, indent=4, sort_keys=True, ensure_ascii=True,
-                      separators=(',', ': '))
-
-
-def json_loads(s):
-    """Read JSON in a consistent way."""
-    return json.loads(ensure_text_type(s))
-
-
 def parse_metadata(s):
 
     # Here we allow that a store may return an already-parsed metadata object,
@@ -95,9 +75,7 @@ def encode_array_metadata(meta):
         order=meta['order'],
         filters=meta['filters'],
     )
-    s = json_dumps(meta)
-    b = s.encode('ascii')
-    return b
+    return json_dumps(meta)
 
 
 def encode_dtype(d):
@@ -142,9 +120,7 @@ def encode_group_metadata(meta=None):
     meta = dict(
         zarr_format=ZARR_FORMAT,
     )
-    s = json_dumps(meta)
-    b = s.encode('ascii')
-    return b
+    return json_dumps(meta)
 
 
 FLOAT_FILLS = {
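
For illustration only (not part of the diff): with this change, encode_group_metadata and encode_array_metadata return the JSON document as ASCII bytes directly, so callers no longer tack on .encode('ascii'). A minimal sketch, assuming zarr at this revision:

from zarr.meta import ZARR_FORMAT, encode_group_metadata
from zarr.util import json_loads

b = encode_group_metadata()  # defaults to dict(zarr_format=ZARR_FORMAT)
assert isinstance(b, bytes)  # already encoded; no .encode('ascii') needed
assert json_loads(b) == {'zarr_format': ZARR_FORMAT}
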
12 changes: 6 additions & 6 deletions zarr/n5.py
@@ -71,14 +71,14 @@ def __getitem__(self, key):
             key = key.replace(zarr_group_meta_key, n5_attrs_key)
             value = group_metadata_to_zarr(self._load_n5_attrs(key))
 
-            return json_dumps(value).encode('ascii')
+            return json_dumps(value)
 
         elif key.endswith(zarr_array_meta_key):
 
             key = key.replace(zarr_array_meta_key, n5_attrs_key)
             value = array_metadata_to_zarr(self._load_n5_attrs(key))
 
-            return json_dumps(value).encode('ascii')
+            return json_dumps(value)
 
         elif key.endswith(zarr_attrs_key):
 
@@ -88,7 +88,7 @@ def __getitem__(self, key):
             if len(value) == 0:
                 raise KeyError(key)
             else:
-                return json_dumps(value).encode('ascii')
+                return json_dumps(value)
 
         elif is_chunk_key(key):
 
@@ -105,7 +105,7 @@ def __setitem__(self, key, value):
             n5_attrs = self._load_n5_attrs(key)
             n5_attrs.update(**group_metadata_to_n5(json_loads(value)))
 
-            value = json_dumps(n5_attrs).encode('ascii')
+            value = json_dumps(n5_attrs)
 
         elif key.endswith(zarr_array_meta_key):
 
@@ -114,7 +114,7 @@ def __setitem__(self, key, value):
             n5_attrs = self._load_n5_attrs(key)
             n5_attrs.update(**array_metadata_to_n5(json_loads(value)))
 
-            value = json_dumps(n5_attrs).encode('ascii')
+            value = json_dumps(n5_attrs)
 
         elif key.endswith(zarr_attrs_key):
 
@@ -135,7 +135,7 @@ def __setitem__(self, key, value):
             # add new user attributes
             n5_attrs.update(**zarr_attrs)
 
-            value = json_dumps(n5_attrs).encode('ascii')
+            value = json_dumps(n5_attrs)
 
         elif is_chunk_key(key):
 
5 changes: 2 additions & 3 deletions zarr/storage.py
@@ -26,15 +26,14 @@
 import errno
 import re
 import sys
-import json
 import multiprocessing
 from pickle import PicklingError
 from threading import Lock, RLock
 import glob
 import warnings
 
 
-from zarr.util import (normalize_shape, normalize_chunks, normalize_order,
+from zarr.util import (json_loads, normalize_shape, normalize_chunks, normalize_order,
                        normalize_storage_path, buffer_size,
                        normalize_fill_value, nolock, normalize_dtype)
 from zarr.meta import encode_array_metadata, encode_group_metadata
@@ -2458,7 +2457,7 @@ def __init__(self, store, metadata_key='.zmetadata'):
            d = store[metadata_key].decode()  # pragma: no cover
        else:  # pragma: no cover
            d = store[metadata_key]
-        meta = json.loads(d)
+        meta = json_loads(d)
 
         # check format of consolidated metadata
         consolidated_format = meta.get('zarr_consolidated_format', None)
22 changes: 21 additions & 1 deletion zarr/util.py
@@ -1,6 +1,8 @@
 # -*- coding: utf-8 -*-
 from __future__ import absolute_import, print_function, division
 from textwrap import TextWrapper, dedent
+import codecs
+import json
 import numbers
 import uuid
 import inspect
@@ -9,7 +11,7 @@
 from asciitree import BoxStyle, LeftAligned
 from asciitree.traversal import Traversal
 import numpy as np
-from numcodecs.compat import ensure_ndarray
+from numcodecs.compat import ensure_ndarray, ensure_contiguous_ndarray
 from numcodecs.registry import codec_registry
 
 
@@ -24,6 +26,24 @@
 }
 
 
+def ensure_text_type(s):
+    if not isinstance(s, text_type):
+        s = ensure_contiguous_ndarray(s)
+        s = codecs.decode(s, 'ascii')
+    return s
+
+
+def json_dumps(o):
+    """Write JSON in a consistent, human-readable way."""
+    return json.dumps(o, indent=4, sort_keys=True, ensure_ascii=True,
+                      separators=(',', ': ')).encode('ascii')
+
+
+def json_loads(s):
+    """Read JSON in a consistent way."""
+    return json.loads(ensure_text_type(s))
+
+
 def normalize_shape(shape):
     """Convenience function to normalize the `shape` argument."""
 
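
For illustration only (not part of the diff), a minimal round trip through the new helpers, assuming zarr at this revision: json_dumps always returns ASCII bytes in a stable layout (indent=4, sorted keys), and json_loads accepts bytes, or anything else ensure_text_type can coerce, as well as plain text:

from zarr.util import json_dumps, json_loads

meta = {'zarr_format': 2, 'shape': [10, 10]}

encoded = json_dumps(meta)  # bytes: indent=4, sorted keys, ASCII only
assert isinstance(encoded, bytes)

decoded = json_loads(encoded)  # no manual .decode() needed for bytes input
assert decoded == meta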