@@ -16,7 +16,6 @@
 # specific language governing permissions and limitations
 # under the License.
 
-import sys
 import uuid
 from datetime import datetime
 from decimal import Decimal
@@ -33,131 +32,138 @@
 
 from elasticsearch_serverless import Elasticsearch
 from elasticsearch_serverless.exceptions import SerializationError
-from elasticsearch_serverless.serializer import JSONSerializer, TextSerializer
+from elasticsearch_serverless.serializer import (
+    JSONSerializer,
+    OrjsonSerializer,
+    TextSerializer,
+)
 
 requires_numpy_and_pandas = pytest.mark.skipif(
-    np is None or pd is None, reason="Test requires numpy or pandas to be available"
+    np is None or pd is None, reason="Test requires numpy and pandas to be available"
 )
 
 
-def test_datetime_serialization():
-    assert b'{"d":"2010-10-01T02:30:00"}' == JSONSerializer().dumps(
+@pytest.fixture(params=[JSONSerializer, OrjsonSerializer])
+def json_serializer(request: pytest.FixtureRequest):
+    yield request.param()
+
+
+def test_datetime_serialization(json_serializer):
+    assert b'{"d":"2010-10-01T02:30:00"}' == json_serializer.dumps(
         {"d": datetime(2010, 10, 1, 2, 30)}
     )
 
 
-def test_decimal_serialization():
-    requires_numpy_and_pandas()
+def test_decimal_serialization(json_serializer):
+    assert b'{"d":3.8}' == json_serializer.dumps({"d": Decimal("3.8")})
 
-    if sys.version_info[:2] == (2, 6):
-        pytest.skip("Float rounding is broken in 2.6.")
-    assert b'{"d":3.8}' == JSONSerializer().dumps({"d": Decimal("3.8")})
 
-
-def test_uuid_serialization():
-    assert b'{"d":"00000000-0000-0000-0000-000000000003"}' == JSONSerializer().dumps(
+def test_uuid_serialization(json_serializer):
+    assert b'{"d":"00000000-0000-0000-0000-000000000003"}' == json_serializer.dumps(
         {"d": uuid.UUID("00000000-0000-0000-0000-000000000003")}
     )
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_bool():
-    assert b'{"d":true}' == JSONSerializer().dumps({"d": np.bool_(True)})
+def test_serializes_numpy_bool(json_serializer):
+    assert b'{"d":true}' == json_serializer.dumps({"d": np.bool_(True)})
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_integers():
-    ser = JSONSerializer()
+def test_serializes_numpy_integers(json_serializer):
     for np_type in (
         np.int_,
         np.int8,
         np.int16,
         np.int32,
         np.int64,
     ):
-        assert ser.dumps({"d": np_type(-1)}) == b'{"d":-1}'
+        assert json_serializer.dumps({"d": np_type(-1)}) == b'{"d":-1}'
 
     for np_type in (
         np.uint8,
         np.uint16,
         np.uint32,
         np.uint64,
    ):
-        assert ser.dumps({"d": np_type(1)}) == b'{"d":1}'
+        assert json_serializer.dumps({"d": np_type(1)}) == b'{"d":1}'
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_floats():
-    ser = JSONSerializer()
+def test_serializes_numpy_floats(json_serializer):
     for np_type in (
         np.float32,
         np.float64,
     ):
-        assert re.search(rb'^{"d":1\.2[\d]*}$', ser.dumps({"d": np_type(1.2)}))
+        assert re.search(
+            rb'^{"d":1\.2[\d]*}$', json_serializer.dumps({"d": np_type(1.2)})
+        )
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_datetime():
-    assert b'{"d":"2010-10-01T02:30:00"}' == JSONSerializer().dumps(
+def test_serializes_numpy_datetime(json_serializer):
+    assert b'{"d":"2010-10-01T02:30:00"}' == json_serializer.dumps(
         {"d": np.datetime64("2010-10-01T02:30:00")}
     )
 
 
 @requires_numpy_and_pandas
-def test_serializes_numpy_ndarray():
-    assert b'{"d":[0,0,0,0,0]}' == JSONSerializer().dumps(
+def test_serializes_numpy_ndarray(json_serializer):
+    assert b'{"d":[0,0,0,0,0]}' == json_serializer.dumps(
         {"d": np.zeros((5,), dtype=np.uint8)}
     )
     # This isn't useful for Elasticsearch, just want to make sure it works.
-    assert b'{"d":[[0,0],[0,0]]}' == JSONSerializer().dumps(
+    assert b'{"d":[[0,0],[0,0]]}' == json_serializer.dumps(
         {"d": np.zeros((2, 2), dtype=np.uint8)}
     )
 
 
 @requires_numpy_and_pandas
 def test_serializes_numpy_nan_to_nan():
-    assert b'{"d":NaN}' == JSONSerializer().dumps({"d": np.nan})
+    assert b'{"d":NaN}' == JSONSerializer().dumps({"d": float("NaN")})
+    # NaN is invalid JSON, and orjson silently converts it to null
+    assert b'{"d":null}' == OrjsonSerializer().dumps({"d": float("NaN")})
 
 
 @requires_numpy_and_pandas
-def test_serializes_pandas_timestamp():
-    assert b'{"d":"2010-10-01T02:30:00"}' == JSONSerializer().dumps(
+def test_serializes_pandas_timestamp(json_serializer):
+    assert b'{"d":"2010-10-01T02:30:00"}' == json_serializer.dumps(
         {"d": pd.Timestamp("2010-10-01T02:30:00")}
     )
 
 
 @requires_numpy_and_pandas
-def test_serializes_pandas_series():
-    assert b'{"d":["a","b","c","d"]}' == JSONSerializer().dumps(
+def test_serializes_pandas_series(json_serializer):
+    assert b'{"d":["a","b","c","d"]}' == json_serializer.dumps(
        {"d": pd.Series(["a", "b", "c", "d"])}
    )
 
 
 @requires_numpy_and_pandas
 @pytest.mark.skipif(not hasattr(pd, "NA"), reason="pandas.NA is required")
-def test_serializes_pandas_na():
-    assert b'{"d":null}' == JSONSerializer().dumps({"d": pd.NA})
+def test_serializes_pandas_na(json_serializer):
+    assert b'{"d":null}' == json_serializer.dumps({"d": pd.NA})
 
 
 @requires_numpy_and_pandas
 @pytest.mark.skipif(not hasattr(pd, "NaT"), reason="pandas.NaT required")
-def test_raises_serialization_error_pandas_nat():
+def test_raises_serialization_error_pandas_nat(json_serializer):
     with pytest.raises(SerializationError):
-        JSONSerializer().dumps({"d": pd.NaT})
+        json_serializer.dumps({"d": pd.NaT})
 
 
 @requires_numpy_and_pandas
-def test_serializes_pandas_category():
+def test_serializes_pandas_category(json_serializer):
     cat = pd.Categorical(["a", "c", "b", "a"], categories=["a", "b", "c"])
-    assert b'{"d":["a","c","b","a"]}' == JSONSerializer().dumps({"d": cat})
+    assert b'{"d":["a","c","b","a"]}' == json_serializer.dumps({"d": cat})
 
     cat = pd.Categorical([1, 2, 3], categories=[1, 2, 3])
-    assert b'{"d":[1,2,3]}' == JSONSerializer().dumps({"d": cat})
+    assert b'{"d":[1,2,3]}' == json_serializer.dumps({"d": cat})
 
 
-def test_json_raises_serialization_error_on_dump_error():
+def test_json_raises_serialization_error_on_dump_error(json_serializer):
     with pytest.raises(SerializationError):
-        JSONSerializer().dumps(object())
+        json_serializer.dumps(object())
 
 
 def test_raises_serialization_error_on_load_error():
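
The parametrized json_serializer fixture above runs every assertion against both JSONSerializer and the new OrjsonSerializer, so the two implementations are held to the same wire format. For reference, here is a minimal sketch of how application code might opt into the orjson-backed serializer. This assumes the serverless client mirrors elasticsearch-py's serializer constructor argument and that the optional orjson package is installed; neither appears in the diff itself, and the endpoint and credentials are hypothetical.

    from elasticsearch_serverless import Elasticsearch
    from elasticsearch_serverless.serializer import OrjsonSerializer

    # Hypothetical endpoint and API key, for illustration only.
    client = Elasticsearch(
        "https://my-project.es.example.io:443",
        api_key="...",
        # Swap the default JSON serializer for the orjson-backed one
        # (assumed keyword, mirroring elasticsearch-py).
        serializer=OrjsonSerializer(),
    )

The one behavioral difference the tests pin down is NaN handling: on float("NaN"), JSONSerializer emits the non-standard NaN token, while OrjsonSerializer emits null, since NaN is not valid JSON.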
|