diff --git a/cpp/src/arrow/python/pandas_convert.cc b/cpp/src/arrow/python/pandas_convert.cc
index 636a3fd15c0..9f65af41bb2 100644
--- a/cpp/src/arrow/python/pandas_convert.cc
+++ b/cpp/src/arrow/python/pandas_convert.cc
@@ -444,7 +444,7 @@ inline Status PandasConverter::ConvertData(std::shared_ptr<Buffer>* data) {
 
   // Handle LONGLONG->INT64 and other fun things
   int type_num_compat = cast_npy_type_compat(PyArray_DESCR(arr_)->type_num);
-  if (traits::npy_type != type_num_compat) {
+  if (numpy_type_size(traits::npy_type) != numpy_type_size(type_num_compat)) {
     return Status::NotImplemented("NumPy type casts not yet implemented");
   }
 
diff --git a/cpp/src/arrow/python/type_traits.h b/cpp/src/arrow/python/type_traits.h
index 26b15bdc9f4..b6761ae0d26 100644
--- a/cpp/src/arrow/python/type_traits.h
+++ b/cpp/src/arrow/python/type_traits.h
@@ -15,6 +15,8 @@
 // specific language governing permissions and limitations
 // under the License.
 
+// Internal header
+
 #include "arrow/python/platform.h"
 
 #include <cstdint>
@@ -24,6 +26,7 @@
 
 #include "arrow/builder.h"
 #include "arrow/type.h"
+#include "arrow/util/logging.h"
 
 namespace arrow {
 namespace py {
@@ -224,5 +227,50 @@ struct arrow_traits {
   static constexpr bool supports_nulls = true;
 };
 
+static inline int numpy_type_size(int npy_type) {
+  switch (npy_type) {
+    case NPY_BOOL:
+      return 1;
+    case NPY_INT8:
+      return 1;
+    case NPY_INT16:
+      return 2;
+    case NPY_INT32:
+      return 4;
+    case NPY_INT64:
+      return 8;
+#if (NPY_INT64 != NPY_LONGLONG)
+    case NPY_LONGLONG:
+      return 8;
+#endif
+    case NPY_UINT8:
+      return 1;
+    case NPY_UINT16:
+      return 2;
+    case NPY_UINT32:
+      return 4;
+    case NPY_UINT64:
+      return 8;
+#if (NPY_UINT64 != NPY_ULONGLONG)
+    case NPY_ULONGLONG:
+      return 8;
+#endif
+    case NPY_FLOAT16:
+      return 2;
+    case NPY_FLOAT32:
+      return 4;
+    case NPY_FLOAT64:
+      return 8;
+    case NPY_DATETIME:
+      return 8;
+    case NPY_OBJECT:
+      return sizeof(void*);
+    default:
+      DCHECK(false) << "unhandled numpy type";
+      break;
+  }
+  return -1;
+}
+
 } // namespace py
 } // namespace arrow
diff --git a/cpp/src/arrow/util/stl.h b/cpp/src/arrow/util/stl.h
index bfce111ff8a..d58689b7488 100644
--- a/cpp/src/arrow/util/stl.h
+++ b/cpp/src/arrow/util/stl.h
@@ -20,7 +20,7 @@
 
 #include <vector>
 
-#include <glog/logging.h>
+#include "arrow/util/logging.h"
 
 namespace arrow {
 
diff --git a/python/pyarrow/tests/test_ipc.py b/python/pyarrow/tests/test_ipc.py
index 81213ede315..02040678958 100644
--- a/python/pyarrow/tests/test_ipc.py
+++ b/python/pyarrow/tests/test_ipc.py
@@ -158,7 +158,8 @@ def run(self):
             connection.close()
 
     def get_result(self):
-        return(self._schema, self._table if self._do_read_all else self._batches)
+        return(self._schema, self._table if self._do_read_all
+               else self._batches)
 
     def setUp(self):
         # NOTE: must start and stop server in test
diff --git a/python/pyarrow/tests/test_parquet.py b/python/pyarrow/tests/test_parquet.py
index 268e87af7dd..8c446af03fc 100644
--- a/python/pyarrow/tests/test_parquet.py
+++ b/python/pyarrow/tests/test_parquet.py
@@ -348,6 +348,66 @@ def test_column_of_lists(tmpdir):
     tm.assert_frame_equal(df, df_read)
 
 
+@parquet
+def test_date_time_types(tmpdir):
+    buf = io.BytesIO()
+
+    t1 = pa.date32()
+    data1 = np.array([17259, 17260, 17261], dtype='int32')
+    a1 = pa.Array.from_pandas(data1, type=t1)
+
+    t2 = pa.date64()
+    data2 = data1.astype('int64') * 86400000
+    a2 = pa.Array.from_pandas(data2, type=t2)
+
+    t3 = pa.timestamp('us')
+    start = pd.Timestamp('2000-01-01').value / 1000
+    data3 = np.array([start, start + 1, start + 2], dtype='int64')
+    a3 = pa.Array.from_pandas(data3, type=t3)
+
+    t4 = pa.time32('ms')
+    data4 = np.arange(3, dtype='i4')
+    a4 = pa.Array.from_pandas(data4, type=t4)
+
+    t5 = pa.time64('us')
+    a5 = pa.Array.from_pandas(data4.astype('int64'), type=t5)
+
+    t6 = pa.time32('s')
+    a6 = pa.Array.from_pandas(data4, type=t6)
+
+    ex_t6 = pa.time32('ms')
+    ex_a6 = pa.Array.from_pandas(data4 * 1000, type=ex_t6)
+
+    table = pa.Table.from_arrays([a1, a2, a3, a4, a5, a6],
+                                 ['date32', 'date64', 'timestamp[us]',
+                                  'time32[s]', 'time64[us]', 'time32[s]'])
+
+    # date64 as date32
+    # time32[s] to time32[ms]
+    expected = pa.Table.from_arrays([a1, a1, a3, a4, a5, ex_a6],
+                                    ['date32', 'date64', 'timestamp[us]',
+                                     'time32[s]', 'time64[us]', 'time32[s]'])
+
+    pq.write_table(table, buf, version="2.0")
+    buf.seek(0)
+
+    result = pq.read_table(buf)
+    assert result.equals(expected)
+
+    # Unsupported stuff
+    def _assert_unsupported(array):
+        table = pa.Table.from_arrays([array], ['unsupported'])
+        buf = io.BytesIO()
+
+        with pytest.raises(NotImplementedError):
+            pq.write_table(table, buf, version="2.0")
+
+    t7 = pa.time64('ns')
+    a7 = pa.Array.from_pandas(data4.astype('int64'), type=t7)
+
+    _assert_unsupported(a7)
+
+
 @parquet
 def test_multithreaded_read():
     df = alltypes_sample(size=10000)