diff --git a/src/IO/AbstractIOHandlerImpl.cpp b/src/IO/AbstractIOHandlerImpl.cpp index faad2f49a9..b6add8a6d9 100644 --- a/src/IO/AbstractIOHandlerImpl.cpp +++ b/src/IO/AbstractIOHandlerImpl.cpp @@ -268,7 +268,10 @@ std::future<void> AbstractIOHandlerImpl::flush() i.writable->parent, "->", i.writable, - "] WRITE_DATASET"); + "] WRITE_DATASET, offset=", + [&parameter]() { return vec_as_string(parameter.offset); }, + ", extent=", + [&parameter]() { return vec_as_string(parameter.extent); }); writeDataset(i.writable, parameter); break; } diff --git a/src/binding/python/RecordComponent.cpp b/src/binding/python/RecordComponent.cpp index 9fac7a8cfe..6774b5e182 100644 --- a/src/binding/python/RecordComponent.cpp +++ b/src/binding/python/RecordComponent.cpp @@ -22,6 +22,7 @@ #include #include #include +#include #include #include "openPMD/Dataset.hpp" @@ -322,13 +323,16 @@ struct StoreChunkFromPythonArray Offset const &offset, Extent const &extent) { - // here, we increase a reference on the user-passed data so that + a.inc_ref(); + void *data = a.mutable_data(); + // here, we store an owning handle in the lambda capture so that // temporary and lost-scope variables stay alive until we flush // note: this does not yet prevent the user, as in C++, to build // a race condition by manipulating the data that was passed - a.inc_ref(); - void *data = a.mutable_data(); - std::shared_ptr<T> shared((T *)data, [a](T *) { a.dec_ref(); }); + std::shared_ptr<T> shared( + (T *)data, [owning_handle = a.cast<py::object>()](T *) { + // no-op + }); r.storeChunk(std::move(shared), offset, extent); } @@ -343,13 +347,15 @@ struct LoadChunkIntoPythonArray Offset const &offset, Extent const &extent) { - // here, we increase a reference on the user-passed data so that + void *data = a.mutable_data(); + // here, we store an owning handle in the lambda capture so that // temporary and lost-scope variables stay alive until we flush // note: this does not yet prevent the user, as in C++, to build // a race condition by manipulating 
the data that was passed - a.inc_ref(); - void *data = a.mutable_data(); - std::shared_ptr<T> shared((T *)data, [a](T *) { a.dec_ref(); }); + std::shared_ptr<T> shared( + (T *)data, [owning_handle = a.cast<py::object>()](T *) { + // no-op + }); r.loadChunk(std::move(shared), offset, extent); } @@ -365,14 +371,15 @@ struct LoadChunkIntoPythonBuffer Offset const &offset, Extent const &extent) { - // here, we increase a reference on the user-passed data so that + void *data = buffer_info.ptr; + // here, we store an owning handle in the lambda capture so that // temporary and lost-scope variables stay alive until we flush // note: this does not yet prevent the user, as in C++, to build // a race condition by manipulating the data that was passed - buffer.inc_ref(); - void *data = buffer_info.ptr; std::shared_ptr<T> shared( - (T *)data, [buffer](T *) { buffer.dec_ref(); }); + (T *)data, [owning_handle = buffer.cast<py::object>()](T *) { + // no-op + }); r.loadChunk(std::move(shared), offset, extent); } diff --git a/test/python/unittest/API/APITest.py b/test/python/unittest/API/APITest.py index 6337807f33..b81cef596d 100644 --- a/test/python/unittest/API/APITest.py +++ b/test/python/unittest/API/APITest.py @@ -82,6 +82,52 @@ def tearDown(self): del self.__particle_series del self.__series + # This function exhibits a bug in the old use of refcounting. + def refcountingCreateData(self): + series = io.Series( + "../samples/refcounting.json", + io.Access.create_linear, + ) + + for i in range(10): + current_iteration = series.snapshots()[i] + + # First, write an E mesh. + E = current_iteration.meshes["E"] + E.axis_labels = ["x", "y"] + for dim in ["x", "y"]: + component = E[dim] + component.reset_dataset( + io.Dataset(np.dtype("float"), [10, 10])) + component[:, :] = np.reshape( + np.arange(i * 100, (i + 1) * 100, dtype=np.dtype("float")), + [10, 10], + ) + + # Now, write some e particles. 
+ e = current_iteration.particles["e"] + for dim in ["x", "y"]: + # Do not bother with a positionOffset + position_offset = e["positionOffset"][dim] + position_offset.make_constant(0) + + position = e["position"][dim] + position.reset_dataset(io.Dataset(np.dtype("float"), [100])) + position[:] = np.arange( + i * 100, (i + 1) * 100, dtype=np.dtype("float") + ) + + def testRefCounting(self): + self.refcountingCreateData() + + read = io.Series("../samples/refcounting.json", io.Access.read_linear) + iteration = read.snapshots()[0] + pos_x = iteration.particles["e"]["position"]["x"] + loaded = pos_x[:] + read.flush() + self.assertTrue(np.allclose( + loaded, np.arange(0, 100, dtype=np.dtype("float")))) + def testFieldData(self): """ Testing serial IO on a pure field dataset. """