From d015f976cb2285b460e14e23cc65019f82874644 Mon Sep 17 00:00:00 2001
From: "Alina (Xi) Li"
Date: Tue, 2 Apr 2024 07:54:07 +0200
Subject: [PATCH 01/63] Add Arrow Flight SQL ODBC driver

Co-authored-by: rscales

Add initial framework for odbc dll

- Add ARROW_FLIGHT_SQL_ODBC option. If we set `ARROW_FLIGHT_SQL_ODBC=ON`,
  the flightsql odbc folder will be built
- Add odbc api layer for entry_point.cc
- builds odbc dll file, with ODBC APIs exported in odbc.def

Address James' comments

Fix `odbcabstraction` build errors and partially fix `flightsql_odbc` errors

Fix boost-variant not found error

- Adding dependencies from odbc/vcpkg.json to cpp/vcpkg.json
- Fix whereami.cc and .h dependency; ported latest code

Update whereami.cc

- use `long` instead of `int64`. Fixed namespace issues.
- PR CI fix: Add `parquet-testing` back

Partial build fix for `flight_sql` folder

- Replaced `namespace arrow` and `namespace odbcabstraction` with
  `using namespace ...`
- fix flight_sql_connection.cc

Fix `util::nullopt` to use `std::nullopt`

- fix std::optional
- fix BufferReader
- Fix GetSchema
- fix json_converter.cc
- partial fix configuration.h
- partial fix get_info_cache.cc
- Fix winsock build error
- Comment out `flight_sql` files that cannot build
- Comment out configuration and unit tests
- Comment out get info cache and system trust store

Create initial odbc tests folder

Implement SQLAllocEnv

Fix cmake build

Implement SQLFreeEnv

Fix rest of build errors from `flightsql-odbc`

- Fix get info errors
- Fix for configuration window - added odbcinst library
- Fix system trust store
- unit test fixes
- Add dependency on ARROW_COMPUTE. `arrow/compute/api.h` is used in
  `flight_sql`. Adding `ARROW_COMPUTE=ON` during build fixed run-time
  unit test failures.

Implement SQLAllocConnect and SQLFreeConnect

Fix build issue from static flight sql driver

Lint and code style fixes

Re-add deleted submodule parquet-testing

clang-format lint fix

cpplint lint fix

Exclude whereami in rat exclude list

C++/CLI lint fix

Update parquet-testing to match commit from `main`

Address Kou's comments

ODBC directory lint fixes

Catching the lint fixes outside of `flightsql-odbc` code

Fix build warnings that get treated as errors

Implement SQLSetEnvAttr and SQLGetEnvAttr

Implement use of ExecuteWithDiagnostics

Doxygen Error Fixes and Address comments from Kou and James

Address comments from Kou

- Updates License.txt
- Update cmake toolchain
- Move whereami to `vendored`
- Use string_view instead of NOLINT std::string

Remove `whereami.cc` from arrow util build

We are building whereami.cc as part of odbc

Fix include headers to replace <> with ""

Address comments from James

Implement SQLGetDiagField
---
 .../odbc/ArrowFlightSqlOdbcConfig.cmake.in    | 38 ++
 cpp/src/arrow/flight/sql/odbc/CMakeLists.txt  | 50 +++
 .../sql/odbc/arrow-flight-sql-odbc.pc.in      | 27 ++
 cpp/src/arrow/flight/sql/odbc/entry_points.cc | 82 +++++
 .../primitive_array_accessor_test.cc          |  2 +-
 .../odbc/flight_sql/config/configuration.cc   |  6 +-
 .../flight_sql_result_set_accessors.h         |  4 +-
 .../flight_sql_result_set_metadata.cc         |  4 +-
 .../odbc/flight_sql/flight_sql_ssl_config.h   |  4 +-
 .../flight_sql_stream_chunk_buffer.h          |  6 +-
 .../sql/odbc/flight_sql/get_info_cache.h      |  4 +-
 .../include/flight_sql/flight_sql_driver.h    |  4 +-
 .../sql/odbc/flight_sql/json_converter.h      |  2 +-
 .../flight_sql/record_batch_transformer.h     |  4 +-
 .../flight_sql/scalar_function_reporter.h     |  2 +-
 .../flight/sql/odbc/flight_sql/system_dsn.cc  |  3 +-
 .../odbc/flight_sql/ui/add_property_window.cc |  2 +-
.../sql/odbc/flight_sql/ui/custom_window.cc | 4 +- .../flight_sql/ui/dsn_configuration_window.cc | 2 +- .../arrow/flight/sql/odbc/flight_sql/utils.h | 6 +- .../flight/sql/odbc/install/install_amd64.cmd | 36 ++ cpp/src/arrow/flight/sql/odbc/odbc.def | 41 +++ cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 330 ++++++++++++++++++ cpp/src/arrow/flight/sql/odbc/odbc_api.h | 43 +++ .../sql/odbc/odbcabstraction/diagnostics.cc | 6 +- .../sql/odbc/odbcabstraction/encoding.cc | 2 +- .../sql/odbc/odbcabstraction/exceptions.cc | 5 +- .../include/odbcabstraction/diagnostics.h | 4 +- .../include/odbcabstraction/exceptions.h | 2 +- .../odbc_impl/attribute_utils.h | 8 +- .../odbc_impl/encoding_utils.h | 4 +- .../odbc_impl/odbc_connection.h | 4 +- .../odbc_impl/odbc_descriptor.h | 2 +- .../odbcabstraction/odbc_impl/odbc_handle.h | 8 +- .../odbc_impl/odbc_statement.h | 6 +- .../include/odbcabstraction/spi/connection.h | 4 +- .../include/odbcabstraction/spi/driver.h | 4 +- .../include/odbcabstraction/spi/result_set.h | 4 +- .../odbcabstraction/spi/result_set_metadata.h | 2 +- .../include/odbcabstraction/types.h | 2 +- .../include/odbcabstraction/utils.h | 4 +- .../flight/sql/odbc/odbcabstraction/logger.cc | 2 +- .../odbc_impl/odbc_connection.cc | 4 +- .../odbc_impl/odbc_environment.cc | 11 +- .../odbc_impl/odbc_statement.cc | 21 +- .../flight/sql/odbc/tests/CMakeLists.txt | 26 ++ .../flight/sql/odbc/tests/connection_test.cc | 310 ++++++++++++++++ cpp/src/arrow/flight/sql/odbc/visibility.h | 48 +++ 48 files changed, 1116 insertions(+), 83 deletions(-) create mode 100644 cpp/src/arrow/flight/sql/odbc/ArrowFlightSqlOdbcConfig.cmake.in create mode 100644 cpp/src/arrow/flight/sql/odbc/arrow-flight-sql-odbc.pc.in create mode 100644 cpp/src/arrow/flight/sql/odbc/entry_points.cc create mode 100644 cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd create mode 100644 cpp/src/arrow/flight/sql/odbc/odbc.def create mode 100644 cpp/src/arrow/flight/sql/odbc/odbc_api.cc create mode 100644 cpp/src/arrow/flight/sql/odbc/odbc_api.h create mode 100644 cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt create mode 100644 cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc create mode 100644 cpp/src/arrow/flight/sql/odbc/visibility.h diff --git a/cpp/src/arrow/flight/sql/odbc/ArrowFlightSqlOdbcConfig.cmake.in b/cpp/src/arrow/flight/sql/odbc/ArrowFlightSqlOdbcConfig.cmake.in new file mode 100644 index 00000000000..da6d44ebc82 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/ArrowFlightSqlOdbcConfig.cmake.in @@ -0,0 +1,38 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
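+#
+# A hedged sketch of downstream usage (`my_app` is an illustrative
+# target name, not part of this package): a consuming CMake project
+# would typically write
+#
+#   find_package(ArrowFlightSqlOdbc REQUIRED)
+#   target_link_libraries(my_app
+#     PRIVATE ArrowFlightSqlOdbc::arrow_flight_sql_odbc_shared)
+#
+# using one of the targets documented below.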
+# +# This config sets the following variables in your project:: +# +# ArrowFlightSqlOdbc_FOUND - true if Arrow Flight SQL ODBC found on the system +# +# This config sets the following targets in your project:: +# +# ArrowFlightSqlOdbc::arrow_flight_sql_odbc_shared - for linked as shared library if shared library is built +# ArrowFlightSqlOdbc::arrow_flight_sql_odbc_static - for linked as static library if static library is built + +@PACKAGE_INIT@ + +include(CMakeFindDependencyMacro) +find_dependency(ArrowFlightSql) + +include("${CMAKE_CURRENT_LIST_DIR}/ArrowFlightSqlOdbcTargets.cmake") + +arrow_keep_backward_compatibility(ArrowFlightSqlOdbc arrow_flight_sql_odbc) + +check_required_components(ArrowFlightSqlOdbc) + +arrow_show_details(ArrowFlightSqlOdbc ARROW_FLIGHT_SQL_ODBC) diff --git a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt index 80be0dee99f..7be9758626f 100644 --- a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt @@ -19,3 +19,53 @@ add_custom_target(arrow_flight_sql_odbc) add_subdirectory(flight_sql) add_subdirectory(odbcabstraction) +add_subdirectory(tests) + +arrow_install_all_headers("arrow/flight/sql/odbc") + +set(ARROW_FLIGHT_SQL_ODBC_SRCS entry_points.cc odbc_api.cc) + +if(WIN32) + list(APPEND ARROW_FLIGHT_SQL_ODBC_SRCS odbc.def) +endif() + +if(WIN32) + if(MSVC_VERSION GREATER_EQUAL 1900) + set(ODBCINST legacy_stdio_definitions odbccp32 shlwapi) + endif() +elseif(APPLE) + set(ODBCINST iodbcinst) +else() + set(ODBCINST odbcinst) +endif() + +add_arrow_lib(arrow_flight_sql_odbc + CMAKE_PACKAGE_NAME + ArrowFlightSqlOdbc + PKG_CONFIG_NAME + arrow-flight-sql-odbc + OUTPUTS + ARROW_FLIGHT_SQL_ODBC_LIBRARIES + SOURCES + ${ARROW_FLIGHT_SQL_ODBC_SRCS} + DEPENDENCIES + arrow_flight_sql + SHARED_LINK_FLAGS + ${ARROW_VERSION_SCRIPT_FLAGS} # Defined in cpp/arrow/CMakeLists.txt + SHARED_LINK_LIBS + arrow_flight_sql_shared + SHARED_INSTALL_INTERFACE_LIBS + ArrowFlight::arrow_flight_sql_shared + STATIC_LINK_LIBS + arrow_flight_sql_static + STATIC_INSTALL_INTERFACE_LIBS + ArrowFlight::arrow_flight_sql_static + SHARED_PRIVATE_LINK_LIBS + ${ODBC_LIBRARIES} + ${ODBCINST} + odbcabstraction + arrow_odbc_spi_impl) + +foreach(LIB_TARGET ${ARROW_FLIGHT_SQL_ODBC_LIBRARIES}) + target_compile_definitions(${LIB_TARGET} PRIVATE ARROW_FLIGHT_SQL_ODBC_EXPORTING) +endforeach() diff --git a/cpp/src/arrow/flight/sql/odbc/arrow-flight-sql-odbc.pc.in b/cpp/src/arrow/flight/sql/odbc/arrow-flight-sql-odbc.pc.in new file mode 100644 index 00000000000..78959034954 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/arrow-flight-sql-odbc.pc.in @@ -0,0 +1,27 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
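+#
+# A hedged usage sketch (`app.c` is illustrative only): once the driver
+# is installed, an application could be compiled and linked against it
+# via pkg-config, e.g.
+#
+#   cc app.c $(pkg-config --cflags --libs arrow-flight-sql-odbc)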
+
+prefix=@CMAKE_INSTALL_PREFIX@
+includedir=@ARROW_PKG_CONFIG_INCLUDEDIR@
+libdir=@ARROW_PKG_CONFIG_LIBDIR@
+
+Name: Apache Arrow Flight SQL ODBC
+Description: Apache Arrow Flight SQL ODBC extension
+Version: @ARROW_VERSION@
+Requires: arrow-flight-sql
+Libs: -L${libdir} -larrow_flight_sql_odbc
+Cflags.private: -DARROW_FLIGHT_SQL_ODBC_STATIC
diff --git a/cpp/src/arrow/flight/sql/odbc/entry_points.cc b/cpp/src/arrow/flight/sql/odbc/entry_points.cc
new file mode 100644
index 00000000000..ce91e88e053
--- /dev/null
+++ b/cpp/src/arrow/flight/sql/odbc/entry_points.cc
@@ -0,0 +1,82 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#ifdef _WIN32
+# include
+#endif
+
+#include
+#include
+#include
+#include
+
+#include "arrow/flight/sql/odbc/odbc_api.h"
+#include "arrow/flight/sql/odbc/visibility.h"
+
+SQLRETURN SQL_API SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result) {
+  return arrow::SQLAllocHandle(type, parent, result);
+}
+
+SQLRETURN SQL_API SQLAllocEnv(SQLHENV* env) {
+  return arrow::SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, env);
+}
+
+SQLRETURN SQL_API SQLAllocConnect(SQLHENV env, SQLHDBC* conn) {
+  return arrow::SQLAllocHandle(SQL_HANDLE_DBC, env, conn);
+}
+
+SQLRETURN SQL_API SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) {
+  return arrow::SQLFreeHandle(type, handle);
+}
+
+SQLRETURN SQL_API SQLFreeEnv(SQLHENV env) {
+  return arrow::SQLFreeHandle(SQL_HANDLE_ENV, env);
+}
+
+SQLRETURN SQL_API SQLFreeConnect(SQLHDBC conn) {
+  return arrow::SQLFreeHandle(SQL_HANDLE_DBC, conn);
+}
+
+SQLRETURN SQL_API SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle,
+                                   SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier,
+                                   SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength,
+                                   SQLSMALLINT* stringLengthPtr) {
+  return arrow::SQLGetDiagFieldW(handleType, handle, recNumber, diagIdentifier,
+                                 diagInfoPtr, bufferLength, stringLengthPtr);
+}
+
+SQLRETURN SQL_API SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr,
+                                SQLINTEGER bufferLen, SQLINTEGER* strLenPtr) {
+  return arrow::SQLGetEnvAttr(env, attr, valuePtr, bufferLen, strLenPtr);
+}
+
+SQLRETURN SQL_API SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr,
+                                SQLINTEGER strLen) {
+  return arrow::SQLSetEnvAttr(env, attr, valuePtr, strLen);
+}
+
+SQLRETURN SQL_API SQLDriverConnect(SQLHDBC conn, SQLHWND windowHandle,
+                                   SQLWCHAR* inConnectionString,
+                                   SQLSMALLINT inConnectionStringLen,
+                                   SQLWCHAR* outConnectionString,
+                                   SQLSMALLINT outConnectionStringBufferLen,
+                                   SQLSMALLINT* outConnectionStringLen,
+                                   SQLUSMALLINT driverCompletion) {
+  // TODO: implement SQLDriverConnect by linking to `odbc_impl`; create a
+  // GitHub issue to track the SQLDriverConnect implementation
+  return SQL_INVALID_HANDLE;
+}
diff --git
a/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/primitive_array_accessor_test.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/primitive_array_accessor_test.cc index 820c0a7bd84..abf18fa9ce8 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/primitive_array_accessor_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/primitive_array_accessor_test.cc @@ -16,7 +16,7 @@ // under the License. #include "arrow/flight/sql/odbc/flight_sql/accessors/primitive_array_accessor.h" -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h" #include "arrow/testing/builder.h" #include "gtest/gtest.h" diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc index be92be057da..4eb7d5980c2 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc @@ -38,17 +38,15 @@ std::string ReadDsnString(const std::string& dsn, const std::string_view& key, const std::string& dflt = "") { #define BUFFER_SIZE (1024) std::vector buf(BUFFER_SIZE); - - std::string key_str = std::string(key); int ret = - SQLGetPrivateProfileString(dsn.c_str(), key_str.c_str(), dflt.c_str(), buf.data(), + SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), static_cast(buf.size()), "ODBC.INI"); if (ret > BUFFER_SIZE) { // If there wasn't enough space, try again with the right size buffer. buf.resize(ret + 1); ret = - SQLGetPrivateProfileString(dsn.c_str(), key_str.c_str(), dflt.c_str(), buf.data(), + SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), static_cast(buf.size()), "ODBC.INI"); } diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_accessors.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_accessors.h index 3f7d6856083..1d5014140ef 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_accessors.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_accessors.h @@ -17,9 +17,9 @@ #pragma once -#include -#include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h" +#include "arrow/type_fwd.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_metadata.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_metadata.cc index f863d4bc489..035390981c8 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_metadata.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_metadata.cc @@ -16,10 +16,10 @@ // under the License. 
#include "arrow/flight/sql/odbc/flight_sql/flight_sql_result_set_metadata.h" -#include -#include +#include "arrow/flight/sql/column_metadata.h" #include "arrow/flight/sql/odbc/flight_sql/utils.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" +#include "arrow/util/key_value_metadata.h" #include #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_ssl_config.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_ssl_config.h index 76a54f13ce1..2369f0aab4d 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_ssl_config.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_ssl_config.h @@ -17,9 +17,9 @@ #pragma once -#include -#include #include +#include "arrow/flight/types.h" +#include "arrow/status.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_stream_chunk_buffer.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_stream_chunk_buffer.h index 4a84bcbede0..864c025d8b3 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_stream_chunk_buffer.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_stream_chunk_buffer.h @@ -17,9 +17,9 @@ #pragma once -#include -#include -#include +#include "arrow/flight/client.h" +#include "arrow/flight/sql/client.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/blocking_queue.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/get_info_cache.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/get_info_cache.h index a54dda2e13b..819b095e6a6 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/get_info_cache.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/get_info_cache.h @@ -17,12 +17,12 @@ #pragma once -#include -#include #include #include #include #include +#include "arrow/flight/sql/client.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/flight_sql_driver.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/flight_sql_driver.h index 88460cdf5b2..48f2a16416a 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/flight_sql_driver.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/flight_sql_driver.h @@ -17,8 +17,8 @@ #pragma once -#include -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/driver.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/json_converter.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/json_converter.h index de466af4f77..83809265df4 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/json_converter.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/json_converter.h @@ -17,8 +17,8 @@ #pragma once -#include #include +#include "arrow/type_fwd.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/record_batch_transformer.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/record_batch_transformer.h index 261b8c1d7c0..15c482cc631 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/record_batch_transformer.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/record_batch_transformer.h @@ -17,9 +17,9 @@ #pragma once -#include -#include 
#include +#include "arrow/flight/client.h" +#include "arrow/type.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/scalar_function_reporter.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/scalar_function_reporter.h index 5c2ae06cdba..fd6abf6420e 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/scalar_function_reporter.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/scalar_function_reporter.h @@ -17,7 +17,7 @@ #pragma once -#include +#include "arrow/type.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc index 504b62a81eb..67a4c3db3d3 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc @@ -125,8 +125,7 @@ bool RegisterDsn(const Configuration& config, LPCSTR driver) { continue; } - std::string key_str = std::string(key); - if (!SQLWritePrivateProfileString(dsn.c_str(), key_str.c_str(), it->second.c_str(), + if (!SQLWritePrivateProfileString(dsn.c_str(), key.data(), it->second.c_str(), "ODBC.INI")) { PostLastInstallerError(); return false; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc index 64cc1797f7e..75aa491f781 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc @@ -24,7 +24,7 @@ #include -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" #include "ui/custom_window.h" #include "ui/window.h" diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc index 5443ea0ec8d..e79e1221e78 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc @@ -17,7 +17,7 @@ // platform.h includes windows.h, so it needs to be included // before Windowsx.h and commctrl.h -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" #include #include @@ -25,7 +25,7 @@ #include #include -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" #include "ui/custom_window.h" namespace driver { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc index 42741c5a3e5..c47984ca400 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc @@ -20,11 +20,11 @@ #include #include -#include #include #include #include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h" #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/add_property_window.h" diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/utils.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/utils.h index 586cfb22a30..8b3e14599a7 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/utils.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/utils.h @@ -17,13 +17,13 @@ #pragma once -#include -#include -#include #include #include #include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" +#include 
"arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h" +#include "arrow/flight/types.h" namespace driver { namespace flight_sql { diff --git a/cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd b/cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd new file mode 100644 index 00000000000..fe365d59b90 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd @@ -0,0 +1,36 @@ +@echo off + +set ODBC_AMD64=%1 + +@REM enable delayed variable expansion to make environment variables enclosed with "!" to be evaluated +@REM when the command is executed instead of when the command is parsed +setlocal enableextensions enabledelayedexpansion + +if [%ODBC_AMD64%] == [] ( + echo error: 64-bit driver is not specified. Call format: install_amd64 abs_path_to_64_bit_driver + pause + exit /b 1 +) + +if exist %ODBC_AMD64% ( + for %%i IN (%ODBC_AMD64%) DO IF EXIST %%~si\NUL ( + echo warning: The path you have specified seems to be a directory. Note that you have to specify path to driver file itself instead. + ) + echo Installing 64-bit driver: %ODBC_AMD64% + reg add "HKEY_LOCAL_MACHINE\SOFTWARE\ODBC\ODBCINST.INI\Apache Arrow Flight SQL ODBC Driver" /v DriverODBCVer /t REG_SZ /d "03.80" /f + reg add "HKEY_LOCAL_MACHINE\SOFTWARE\ODBC\ODBCINST.INI\Apache Arrow Flight SQL ODBC Driver" /v UsageCount /t REG_DWORD /d 00000001 /f + reg add "HKEY_LOCAL_MACHINE\SOFTWARE\ODBC\ODBCINST.INI\Apache Arrow Flight SQL ODBC Driver" /v Driver /t REG_SZ /d %ODBC_AMD64% /f + reg add "HKEY_LOCAL_MACHINE\SOFTWARE\ODBC\ODBCINST.INI\Apache Arrow Flight SQL ODBC Driver" /v Setup /t REG_SZ /d %ODBC_AMD64% /f + reg add "HKEY_LOCAL_MACHINE\SOFTWARE\ODBC\ODBCINST.INI\ODBC Drivers" /v "Apache Arrow Flight SQL ODBC Driver" /t REG_SZ /d "Installed" /f + + IF !ERRORLEVEL! NEQ 0 ( + echo Error occurred while registering 64-bit driver. Exiting. + echo ERRORLEVEL: !ERRORLEVEL! + exit !ERRORLEVEL! + ) +) else ( + echo 64-bit driver can not be found: %ODBC_AMD64% + echo Call format: install_amd64 abs_path_to_64_bit_driver + pause + exit /b 1 +) diff --git a/cpp/src/arrow/flight/sql/odbc/odbc.def b/cpp/src/arrow/flight/sql/odbc/odbc.def new file mode 100644 index 00000000000..2c93d183c92 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/odbc.def @@ -0,0 +1,41 @@ + +LIBRARY arrow_flight_sql_odbc +EXPORTS + SQLAllocConnect + SQLAllocEnv + SQLAllocHandle + SQLAllocStmt + SQLBindCol + SQLCancel + SQLCloseCursor + SQLColAttributeW + SQLColumnsW + SQLConnectW + SQLDisconnect + SQLDriverConnectW + SQLErrorW + SQLExecDirectW + SQLExecute + SQLFetch + SQLForeignKeysW + SQLFreeEnv + SQLFreeHandle + SQLFreeStmt + SQLGetConnectAttrW + SQLGetData + SQLGetDiagFieldW + SQLGetDiagRecW + SQLGetEnvAttr + SQLGetInfoW + SQLGetStmtAttrW + SQLGetTypeInfoW + SQLMoreResults + SQLNativeSqlW + SQLNumResultCols + SQLPrepareW + SQLPrimaryKeysW + SQLSetConnectAttrW + SQLSetEnvAttr + SQLSetStmtAttrW + SQLTablesW + diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc new file mode 100644 index 00000000000..a57a371044e --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -0,0 +1,330 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. 
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#include
+#include
+#include
+#include
+#include
+#include
+
+// odbc_api.h includes windows.h, which needs to come after winsock2.h;
+// odbc_environment.h includes winsock2.h.
+#include
+
+namespace arrow {
+SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result) {
+  // TODO: implement SQLAllocHandle by linking to `odbc_impl`
+  *result = nullptr;
+
+  switch (type) {
+    case SQL_HANDLE_ENV: {
+      using driver::flight_sql::FlightSqlDriver;
+      using ODBC::ODBCEnvironment;
+
+      *result = SQL_NULL_HENV;
+
+      try {
+        static std::shared_ptr<FlightSqlDriver> odbc_driver =
+            std::make_shared<FlightSqlDriver>();
+        *result = reinterpret_cast<SQLHANDLE>(new ODBCEnvironment(odbc_driver));
+
+        return SQL_SUCCESS;
+      } catch (const std::bad_alloc&) {
+        // allocating the environment failed, so no diagnostic error can be logged here
+        return SQL_ERROR;
+      }
+    }
+
+    case SQL_HANDLE_DBC: {
+      using ODBC::ODBCConnection;
+      using ODBC::ODBCEnvironment;
+
+      *result = SQL_NULL_HDBC;
+
+      ODBCEnvironment* environment = reinterpret_cast<ODBCEnvironment*>(parent);
+
+      return ODBCEnvironment::ExecuteWithDiagnostics(environment, SQL_ERROR, [=]() {
+        std::shared_ptr<ODBCConnection> conn = environment->CreateConnection();
+
+        if (conn) {
+          *result = reinterpret_cast<SQLHANDLE>(conn.get());
+
+          return SQL_SUCCESS;
+        }
+
+        return SQL_ERROR;
+      });
+    }
+
+    case SQL_HANDLE_STMT: {
+      return SQL_INVALID_HANDLE;
+    }
+
+    default:
+      break;
+  }
+
+  return SQL_ERROR;
+}
+
+SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) {
+  switch (type) {
+    case SQL_HANDLE_ENV: {
+      using ODBC::ODBCEnvironment;
+
+      ODBCEnvironment* environment = reinterpret_cast<ODBCEnvironment*>(handle);
+
+      if (!environment) {
+        return SQL_INVALID_HANDLE;
+      }
+
+      delete environment;
+
+      return SQL_SUCCESS;
+    }
+
+    case SQL_HANDLE_DBC: {
+      using ODBC::ODBCConnection;
+
+      ODBCConnection* conn = reinterpret_cast<ODBCConnection*>(handle);
+
+      return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() {
+        conn->releaseConnection();
+
+        return SQL_SUCCESS;
+      });
+    }
+
+    case SQL_HANDLE_STMT:
+      return SQL_INVALID_HANDLE;
+
+    case SQL_HANDLE_DESC:
+      return SQL_INVALID_HANDLE;
+
+    default:
+      break;
+  }
+
+  return SQL_ERROR;
+}
+
+SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle,
+                           SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier,
+                           SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength,
+                           SQLSMALLINT* stringLengthPtr) {
+  using driver::odbcabstraction::Diagnostics;
+  using ODBC::GetStringAttribute;
+  using ODBC::ODBCConnection;
+  using ODBC::ODBCEnvironment;
+
+  if (!handle) {
+    return SQL_INVALID_HANDLE;
+  }
+
+  if (!diagInfoPtr) {
+    return SQL_ERROR;
+  }
+
+  // Set character type to be Unicode by default (not ANSI)
+  bool isUnicode = true;
+  Diagnostics* diagnostics = nullptr;
+
+  switch (handleType) {
+    case SQL_HANDLE_ENV: {
+      ODBCEnvironment* environment = reinterpret_cast<ODBCEnvironment*>(handle);
+      diagnostics = &environment->GetDiagnostics();
+      break;
+    }
+
+    case SQL_HANDLE_DBC: {
+      ODBCConnection* connection = reinterpret_cast<ODBCConnection*>(handle);
+      diagnostics = &connection->GetDiagnostics();
+      break;
+    }
+
+    default:
+      return SQL_ERROR;
+  }
+
+  if (!diagnostics) {
+    return SQL_ERROR;
+  }
+
+  // Retrieve header-level diagnostics if record 0 is specified
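+  // (For reference, a hedged caller-side sketch, illustrative only, of
+  // the ODBC convention handled below: an application reads header
+  // fields on record 0, then walks the 1-based status records, e.g.
+  //
+  //   SQLINTEGER n = 0;
+  //   SQLGetDiagField(SQL_HANDLE_ENV, env, 0, SQL_DIAG_NUMBER, &n, 0, nullptr);
+  //   for (SQLSMALLINT rec = 1; rec <= n; ++rec) {
+  //     // query SQL_DIAG_SQLSTATE, SQL_DIAG_MESSAGE_TEXT, SQL_DIAG_NATIVE
+  //   }
+  // )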
+  if (recNumber == 0) {
+    switch (diagIdentifier) {
+      case SQL_DIAG_NUMBER: {
+        SQLINTEGER count = static_cast<SQLINTEGER>(diagnostics->GetRecordCount());
+        *static_cast<SQLINTEGER*>(diagInfoPtr) = count;
+        if (stringLengthPtr) {
+          *stringLengthPtr = sizeof(SQLINTEGER);
+        }
+
+        return SQL_SUCCESS;
+      }
+
+      case SQL_DIAG_SERVER_NAME: {
+        const std::string source = diagnostics->GetDataSourceComponent();
+        return GetStringAttribute(isUnicode, source, false, diagInfoPtr, bufferLength,
+                                  stringLengthPtr, *diagnostics);
+      }
+
+      default:
+        return SQL_ERROR;
+    }
+  }
+
+  // Retrieve record-level diagnostics from the specified 1-based record
+  uint32_t recordIndex = static_cast<uint32_t>(recNumber - 1);
+  if (!diagnostics->HasRecord(recordIndex)) {
+    return SQL_NO_DATA;
+  }
+
+  // Retrieve record field data
+  switch (diagIdentifier) {
+    case SQL_DIAG_MESSAGE_TEXT: {
+      const std::string message = diagnostics->GetMessageText(recordIndex);
+      return GetStringAttribute(isUnicode, message, false, diagInfoPtr, bufferLength,
+                                stringLengthPtr, *diagnostics);
+    }
+
+    case SQL_DIAG_NATIVE: {
+      *static_cast<SQLINTEGER*>(diagInfoPtr) = diagnostics->GetNativeError(recordIndex);
+      if (stringLengthPtr) {
+        *stringLengthPtr = sizeof(SQLINTEGER);
+      }
+
+      return SQL_SUCCESS;
+    }
+
+    case SQL_DIAG_SQLSTATE: {
+      const std::string state = diagnostics->GetSQLState(recordIndex);
+      return GetStringAttribute(isUnicode, state, false, diagInfoPtr, bufferLength,
+                                stringLengthPtr, *diagnostics);
+    }
+
+    default:
+      return SQL_ERROR;
+  }
+
+  return SQL_ERROR;
+}
+
+SQLRETURN SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr,
+                        SQLINTEGER bufferLen, SQLINTEGER* strLenPtr) {
+  using driver::odbcabstraction::DriverException;
+  using ODBC::ODBCEnvironment;
+
+  ODBCEnvironment* environment = reinterpret_cast<ODBCEnvironment*>(env);
+
+  return ODBCEnvironment::ExecuteWithDiagnostics(environment, SQL_ERROR, [=]() {
+    switch (attr) {
+      case SQL_ATTR_ODBC_VERSION: {
+        if (!valuePtr && !strLenPtr) {
+          throw DriverException("Invalid null pointer for attribute.", "HY000");
+        }
+
+        if (valuePtr) {
+          SQLINTEGER* value = reinterpret_cast<SQLINTEGER*>(valuePtr);
+          *value = static_cast<SQLINTEGER>(environment->getODBCVersion());
+        }
+
+        if (strLenPtr) {
+          *strLenPtr = sizeof(SQLINTEGER);
+        }
+
+        return SQL_SUCCESS;
+      }
+
+      case SQL_ATTR_OUTPUT_NTS: {
+        if (!valuePtr && !strLenPtr) {
+          throw DriverException("Invalid null pointer for attribute.", "HY000");
+        }
+
+        if (valuePtr) {
+          // SQL_ATTR_OUTPUT_NTS always returns SQL_TRUE
+          SQLINTEGER* value = reinterpret_cast<SQLINTEGER*>(valuePtr);
+          *value = SQL_TRUE;
+        }
+
+        if (strLenPtr) {
+          *strLenPtr = sizeof(SQLINTEGER);
+        }
+
+        return SQL_SUCCESS;
+      }
+
+      case SQL_ATTR_CONNECTION_POOLING:
+      case SQL_ATTR_APP_ROW_DESC: {
+        throw DriverException("Optional feature not supported.", "HYC00");
+      }
+
+      default: {
+        throw DriverException("Invalid attribute", "HY092");
+      }
+    }
+  });
+}
+
+SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr,
+                        SQLINTEGER strLen) {
+  using driver::odbcabstraction::DriverException;
+  using ODBC::ODBCEnvironment;
+
+  ODBCEnvironment* environment = reinterpret_cast<ODBCEnvironment*>(env);
+
+  return ODBCEnvironment::ExecuteWithDiagnostics(environment, SQL_ERROR, [=]() {
+    if (!valuePtr) {
+      throw DriverException("Invalid null pointer for attribute.", "HY024");
+    }
+
+    switch (attr) {
+      case SQL_ATTR_ODBC_VERSION: {
+        SQLINTEGER version =
+            static_cast<SQLINTEGER>(reinterpret_cast(valuePtr));
+        if (version == SQL_OV_ODBC2 || version == SQL_OV_ODBC3) {
+          environment->setODBCVersion(version);
+
+          return SQL_SUCCESS;
+        } else {
+          throw DriverException("Invalid value for attribute", "HY024");
+        }
+      }
+
+      case SQL_ATTR_OUTPUT_NTS: {
+        // SQL_ATTR_OUTPUT_NTS cannot be set to SQL_FALSE; it is always SQL_TRUE
+        SQLINTEGER value = static_cast<SQLINTEGER>(reinterpret_cast(valuePtr));
+        if (value == SQL_TRUE) {
+          return SQL_SUCCESS;
+        } else {
+          throw DriverException("Invalid value for attribute", "HY024");
+        }
+      }
+
+      case SQL_ATTR_CONNECTION_POOLING:
+      case SQL_ATTR_APP_ROW_DESC: {
+        throw DriverException("Optional feature not supported.", "HYC00");
+      }
+
+      default: {
+        throw DriverException("Invalid attribute", "HY092");
+      }
+    }
+  });
+}
+}  // namespace arrow
diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.h b/cpp/src/arrow/flight/sql/odbc/odbc_api.h
new file mode 100644
index 00000000000..6c204fe3ae3
--- /dev/null
+++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.h
@@ -0,0 +1,43 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#pragma once
+
+#ifdef _WIN32
+# include
+#endif
+
+#include
+#include
+#include
+
+// @file odbc_api.h
+//
+// Declares the internal ODBC API functions.
+
+namespace arrow {
+SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result);
+SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle);
+SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle,
+                           SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier,
+                           SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength,
+                           SQLSMALLINT* stringLengthPtr);
+SQLRETURN SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr,
+                        SQLINTEGER bufferLen, SQLINTEGER* strLenPtr);
+SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr,
+                        SQLINTEGER strLen);
+}  // namespace arrow
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/diagnostics.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/diagnostics.cc
index 8c94978ef99..78ca45ea2fe 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/diagnostics.cc
+++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/diagnostics.cc
@@ -15,9 +15,9 @@
 // specific language governing permissions and limitations
 // under the License.
 
-#include
-#include
-#include
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h"
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h"
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h"
 
 #include
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/encoding.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/encoding.cc
index 95dc920da78..00718cdbbe5 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/encoding.cc
+++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/encoding.cc
@@ -15,7 +15,7 @@
 // specific language governing permissions and limitations
 // under the License.
-#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/encoding.h" #if defined(__APPLE__) # include diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/exceptions.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/exceptions.cc index fcd8163a500..242c85e5a28 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/exceptions.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/exceptions.cc @@ -15,8 +15,9 @@ // specific language governing permissions and limitations // under the License. -#include -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" + #include namespace driver { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h index f1c6efe4982..473411efd4f 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h @@ -21,8 +21,8 @@ #include #include -#include -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h" namespace driver { namespace odbcabstraction { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h index 48a773e4f4d..82ffebedff6 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h @@ -17,10 +17,10 @@ #pragma once -#include #include #include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/error_codes.h" namespace driver { namespace odbcabstraction { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h index 9163e942ceb..7b3b457a35f 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h @@ -17,16 +17,16 @@ #pragma once -#include -#include -#include #include #include #include #include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h" namespace ODBC { using driver::odbcabstraction::WcsToUtf8; diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h index 25619bb5555..01eae6f059a 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h @@ -16,9 +16,9 @@ // under the License. 
#pragma once +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/encoding.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" -#include -#include #include #include #include diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h index 6a01fe128d9..e771f467e6e 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h @@ -17,9 +17,9 @@ #pragma once -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h" -#include #include #include #include diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_descriptor.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_descriptor.h index 092483f4719..e7656082c5c 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_descriptor.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_descriptor.h @@ -17,7 +17,7 @@ #pragma once -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h" #include #include diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h index c2428df394d..64257541a87 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h @@ -17,12 +17,14 @@ #pragma once -#include -#include +// platform.h includes windows.h, so it needs to be included first +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" + #include #include #include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h" /** * @brief An abstraction over a generic ODBC handle. 
@@ -47,7 +49,7 @@ class ODBCHandle {
       rc = function();
     } catch (const driver::odbcabstraction::DriverException& ex) {
       GetDiagnostics().AddError(ex);
-    } catch (const std::bad_alloc& ex) {
+    } catch (const std::bad_alloc&) {
       GetDiagnostics().AddError(driver::odbcabstraction::DriverException(
           "A memory allocation error occurred.", "HY001"));
     } catch (const std::exception& ex) {
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_statement.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_statement.h
index bbddfac4185..29efaec8280 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_statement.h
+++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_statement.h
@@ -17,9 +17,11 @@
 
 #pragma once
 
-#include
+// platform.h includes windows.h, so it needs to be included first
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h"
+
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h"
 
-#include
 #include
 #include
 #include
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h
index 792a52c1fad..64b7e6a724b 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h
+++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h
@@ -25,8 +25,8 @@
 #include
 #include
 
-#include
-#include
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h"
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h"
 
 namespace driver {
 namespace odbcabstraction {
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/driver.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/driver.h
index f13371bf2d5..61d570574c7 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/driver.h
+++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/driver.h
@@ -19,8 +19,8 @@
 
 #include
 
-#include
-#include
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h"
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h"
 
 namespace driver {
 namespace odbcabstraction {
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set.h
index 1b3f8eb96d8..4c12a4b5934 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set.h
+++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set.h
@@ -20,9 +20,9 @@
 #include
 #include
 
-#include
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h"
 
-#include
+#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h"
 
 namespace driver {
 namespace odbcabstraction {
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set_metadata.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set_metadata.h
index f625a2598c1..636dce21e4a 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set_metadata.h
+++
b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set_metadata.h @@ -17,8 +17,8 @@ #pragma once -#include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h" namespace driver { namespace odbcabstraction { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h index e5d206a2ca7..8f16000daaa 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h @@ -17,8 +17,8 @@ #pragma once -#include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" namespace driver { namespace odbcabstraction { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h index cc848baa0fd..0fa8463b546 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h @@ -17,10 +17,10 @@ #pragma once -#include -#include #include #include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h" namespace driver { namespace odbcabstraction { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/logger.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/logger.cc index edace64cf6a..8b105a2f0b6 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/logger.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/logger.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h" namespace driver { namespace odbcabstraction { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc index 0143976bb48..4d5d4dc3656 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc @@ -85,9 +85,7 @@ void loadPropertiesFromDSN(const std::string& dsn, for (auto& key : keys) { outputBuffer.clear(); outputBuffer.resize(BUFFER_SIZE, '\0'); - - std::string key_str = std::string(key); - SQLGetPrivateProfileString(dsn.c_str(), key_str.c_str(), "", &outputBuffer[0], + SQLGetPrivateProfileString(dsn.c_str(), key.data(), "", &outputBuffer[0], BUFFER_SIZE, "odbc.ini"); std::string value = std::string(&outputBuffer[0]); diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_environment.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_environment.cc index 7781235688f..9d7a8223591 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_environment.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_environment.cc @@ -15,12 +15,13 @@ // specific language governing permissions and limitations // under the License. 
-#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_environment.h" + +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/driver.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h" -#include -#include -#include -#include #include #include #include diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_statement.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_statement.cc index a5db0cc25dd..f6c06060d67 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_statement.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_statement.cc @@ -15,16 +15,17 @@ // specific language governing permissions and limitations // under the License. -#include - -#include -#include -#include -#include -#include -#include -#include -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_statement.h" + +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_descriptor.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/result_set_metadata.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/statement.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/types.h" + #include #include #include diff --git a/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt new file mode 100644 index 00000000000..161669b41df --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt @@ -0,0 +1,26 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
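+# A hedged usage note: assuming a build configured with
+# -DARROW_FLIGHT_SQL_ODBC=ON and tests enabled, the test added below can
+# typically be run through CTest, e.g.
+#
+#   ctest -R connection
+#
+# (the exact registered name depends on add_arrow_test's naming scheme).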
+
+add_custom_target(tests)
+
+include_directories(${ODBC_INCLUDE_DIRS})
+
+add_arrow_test(connection_test
+               SOURCES
+               connection_test.cc
+               EXTRA_LINK_LIBS
+               ${ODBC_LIBRARIES})
diff --git a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc
new file mode 100644
index 00000000000..7991ec40263
--- /dev/null
+++ b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc
@@ -0,0 +1,310 @@
+// Licensed to the Apache Software Foundation (ASF) under one
+// or more contributor license agreements. See the NOTICE file
+// distributed with this work for additional information
+// regarding copyright ownership. The ASF licenses this file
+// to you under the Apache License, Version 2.0 (the
+// "License"); you may not use this file except in compliance
+// with the License. You may obtain a copy of the License at
+//
+// http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing,
+// software distributed under the License is distributed on an
+// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+// KIND, either express or implied. See the License for the
+// specific language governing permissions and limitations
+// under the License.
+
+#ifdef _WIN32
+# include
+#endif
+
+#include
+#include
+#include
+#include "gtest/gtest.h"
+
+namespace arrow {
+namespace flight {
+namespace odbc {
+namespace integration_tests {
+
+TEST(SQLAllocHandle, TestSQLAllocHandleEnv) {
+  // ODBC Environment
+  SQLHENV env;
+
+  // Allocate an environment handle
+  SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &env);
+
+  EXPECT_TRUE(env != NULL);
+}
+
+TEST(SQLAllocEnv, TestSQLAllocEnv) {
+  // ODBC Environment
+  SQLHENV env;
+
+  // Allocate an environment handle
+  SQLRETURN return_value = SQLAllocEnv(&env);
+
+  EXPECT_TRUE(return_value == SQL_SUCCESS);
+}
+
+TEST(SQLAllocHandle, TestSQLAllocHandleConnect) {
+  // ODBC Environment
+  SQLHENV env;
+  SQLHDBC conn;
+
+  // Allocate an environment handle
+  SQLRETURN return_value = SQLAllocEnv(&env);
+
+  EXPECT_TRUE(return_value == SQL_SUCCESS);
+
+  // Allocate a connection using alloc handle
+  SQLRETURN return_alloc_handle = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn);
+
+  EXPECT_TRUE(return_alloc_handle == SQL_SUCCESS);
+}
+
+TEST(SQLAllocConnect, TestSQLAllocConnect) {
+  // ODBC Environment
+  SQLHENV env;
+  SQLHDBC conn;
+
+  // Allocate an environment handle
+  SQLRETURN return_value = SQLAllocEnv(&env);
+
+  EXPECT_TRUE(return_value == SQL_SUCCESS);
+
+  // Allocate a connection using SQLAllocConnect
+  SQLRETURN return_alloc_connect = SQLAllocConnect(env, &conn);
+
+  EXPECT_TRUE(return_alloc_connect == SQL_SUCCESS);
+}
+
+TEST(SQLFreeHandle, TestSQLFreeHandleEnv) {
+  // ODBC Environment
+  SQLHENV env;
+
+  // Allocate an environment handle
+  SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &env);
+
+  // Free an environment handle
+  SQLRETURN return_value = SQLFreeHandle(SQL_HANDLE_ENV, env);
+
+  EXPECT_TRUE(return_value == SQL_SUCCESS);
+}
+
+TEST(SQLFreeEnv, TestSQLFreeEnv) {
+  // ODBC Environment
+  SQLHENV env;
+
+  // Allocate an environment handle
+  SQLAllocHandle(SQL_HANDLE_ENV, SQL_NULL_HANDLE, &env);
+
+  // Free an environment handle
+  SQLRETURN return_value = SQLFreeEnv(env);
+
+  EXPECT_TRUE(return_value == SQL_SUCCESS);
+}
+
+TEST(SQLFreeHandle, TestSQLFreeHandleConnect) {
+  // ODBC Environment
+  SQLHENV env;
+  SQLHDBC conn;
+
+  // Allocate an environment handle
+  SQLRETURN return_value = SQLAllocEnv(&env);
+
+  EXPECT_TRUE(return_value == SQL_SUCCESS);
+
+  // Allocate a connection using alloc handle
Allocate a connection using alloc handle + SQLRETURN return_alloc_handle = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(return_alloc_handle == SQL_SUCCESS); + + // Free the created connection using free handle + SQLRETURN return_free_handle = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(return_free_handle == SQL_SUCCESS); +} + +TEST(SQLFreeConnect, TestSQLFreeConnect) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + // Allocate a connection using alloc handle + SQLRETURN return_alloc_handle = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(return_alloc_handle == SQL_SUCCESS); + + // Free the created connection using free connect + SQLRETURN return_free_connect = SQLFreeConnect(conn); + + EXPECT_TRUE(return_free_connect == SQL_SUCCESS); +} + +TEST(SQLGetEnvAttr, TestSQLGetEnvAttrODBCVersion) { + // ODBC Environment + SQLHENV env; + + SQLINTEGER version; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, &version, 0, 0); + + EXPECT_TRUE(return_get == SQL_SUCCESS); + + EXPECT_EQ(version, SQL_OV_ODBC2); +} + +TEST(SQLSetEnvAttr, TestSQLSetEnvAttrODBCVersionValid) { + // ODBC Environment + SQLHENV env; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + // Attempt to set to a supported version + SQLRETURN return_set = + SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, reinterpret_cast<SQLPOINTER>(SQL_OV_ODBC2), 0); + + EXPECT_TRUE(return_set == SQL_SUCCESS); +} + +TEST(SQLSetEnvAttr, TestSQLSetEnvAttrODBCVersionInvalid) { + // ODBC Environment + SQLHENV env; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + // Attempt to set to an unsupported version + SQLRETURN return_set = + SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, reinterpret_cast<SQLPOINTER>(1), 0); + + EXPECT_TRUE(return_set == SQL_ERROR); +} + +TEST(SQLGetEnvAttr, TestSQLGetEnvAttrOutputNTS) { + // ODBC Environment + SQLHENV env; + + SQLINTEGER output_nts; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_OUTPUT_NTS, &output_nts, 0, 0); + + EXPECT_TRUE(return_get == SQL_SUCCESS); + + EXPECT_EQ(output_nts, SQL_TRUE); +} + +TEST(SQLGetEnvAttr, TestSQLGetEnvAttrGetLength) { + GTEST_SKIP(); + // ODBC Environment + SQLHENV env; + + SQLINTEGER length; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0, &length); + + EXPECT_TRUE(return_get == SQL_SUCCESS); + + EXPECT_EQ(length, sizeof(SQLINTEGER)); +} + +TEST(SQLGetEnvAttr, TestSQLGetEnvAttrNullValuePointer) { + GTEST_SKIP(); + // ODBC Environment + SQLHENV env; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0, nullptr); + + EXPECT_TRUE(return_get == SQL_ERROR); +} + +TEST(SQLSetEnvAttr, TestSQLSetEnvAttrOutputNTSValid) { + // ODBC Environment + SQLHENV env; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + // Attempt to set output NTS to SQL_TRUE, the supported value + SQLRETURN return_set = + SQLSetEnvAttr(env, SQL_ATTR_OUTPUT_NTS, reinterpret_cast<SQLPOINTER>(SQL_TRUE), 0); + + EXPECT_TRUE(return_set == SQL_SUCCESS); +} + +TEST(SQLSetEnvAttr, TestSQLSetEnvAttrOutputNTSInvalid) { + // ODBC Environment + SQLHENV env; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + // Attempt to set output NTS to SQL_FALSE, which is unsupported + SQLRETURN return_set = + SQLSetEnvAttr(env, SQL_ATTR_OUTPUT_NTS, reinterpret_cast<SQLPOINTER>(SQL_FALSE), 0); + + EXPECT_TRUE(return_set == SQL_ERROR); +} + +TEST(SQLSetEnvAttr, TestSQLSetEnvAttrNullValuePointer) { + // ODBC Environment + SQLHENV env; + + // Allocate an environment handle + SQLRETURN return_env = SQLAllocEnv(&env); + + EXPECT_TRUE(return_env == SQL_SUCCESS); + + // Attempt to set using bad data pointer + SQLRETURN return_set = + SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0); + + EXPECT_TRUE(return_set == SQL_ERROR); +} + +} // namespace integration_tests +} // namespace odbc +} // namespace flight +} // namespace arrow + +int main(int argc, char** argv) { + ::testing::InitGoogleTest(&argc, argv); + return RUN_ALL_TESTS(); +} diff --git a/cpp/src/arrow/flight/sql/odbc/visibility.h b/cpp/src/arrow/flight/sql/odbc/visibility.h new file mode 100644 index 00000000000..416dfecc864 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/visibility.h @@ -0,0 +1,48 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License.
+ +#pragma once + +#if defined(_WIN32) || defined(__CYGWIN__) +# if defined(_MSC_VER) +# pragma warning(push) +# pragma warning(disable : 4251) +# else +# pragma GCC diagnostic ignored "-Wattributes" +# endif + +# ifdef ARROW_FLIGHT_SQL_ODBC_STATIC +# define ARROW_FLIGHT_SQL_ODBC_EXPORT +# elif defined(ARROW_FLIGHT_SQL_ODBC_EXPORTING) +# define ARROW_FLIGHT_SQL_ODBC_EXPORT __declspec(dllexport) +# else +# define ARROW_FLIGHT_SQL_ODBC_EXPORT __declspec(dllimport) +# endif + +# define ARROW_FLIGHT_SQL_ODBC_NO_EXPORT +#else // Not Windows +# ifndef ARROW_FLIGHT_SQL_ODBC_EXPORT +# define ARROW_FLIGHT_SQL_ODBC_EXPORT __attribute__((visibility("default"))) +# endif +# ifndef ARROW_FLIGHT_SQL_ODBC_NO_EXPORT +# define ARROW_FLIGHT_SQL_ODBC_NO_EXPORT __attribute__((visibility("hidden"))) +# endif +#endif // Non-Windows + +#if defined(_MSC_VER) +# pragma warning(pop) +#endif From 728a99789745df6763da1c759a42e68722cc663b Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Mon, 12 May 2025 16:28:12 -0700 Subject: [PATCH 02/63] SQLDriverConnect, SQLConnect and SQLDisconnect Implement stubs for SQLGetInfo, SQLGetDiagField and SQLGetDiagRec Separate RegisterDsn and UnregisterDsn from windows build Update code to save driver value from connection string Add ReadMes for ODBC and tests Fix test issues with string_view Address code reviews Update entry_points.cc to fix build issue Remove Dremio references Use emplace properly Address comment from Rob and add SQLDisconnect test case --- cpp/src/arrow/flight/sql/odbc/CMakeLists.txt | 20 +- cpp/src/arrow/flight/sql/odbc/README | 19 + cpp/src/arrow/flight/sql/odbc/entry_points.cc | 49 +- .../flight/sql/odbc/flight_sql/CMakeLists.txt | 4 +- .../odbc/flight_sql/config/configuration.cc | 27 +- .../odbc/flight_sql/flight_sql_auth_method.cc | 12 +- .../odbc/flight_sql/flight_sql_connection.cc | 35 +- .../odbc/flight_sql/flight_sql_connection.h | 7 + .../flight_sql/flight_sql_connection_test.cc | 36 +- .../sql/odbc/flight_sql/flight_sql_driver.cc | 12 +- .../flight_sql_get_tables_reader.cc | 7 +- .../flight_sql_statement_get_columns.cc | 8 +- .../include/flight_sql/config/configuration.h | 9 +- .../arrow/flight/sql/odbc/flight_sql/main.cc | 14 +- .../flight/sql/odbc/flight_sql/system_dsn.cc | 91 +--- .../flight/sql/odbc/flight_sql/system_dsn.h | 51 ++ .../sql/odbc/flight_sql/win_system_dsn.cc | 119 +++++ cpp/src/arrow/flight/sql/odbc/odbc.def | 3 +- cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 137 ++++- cpp/src/arrow/flight/sql/odbc/odbc_api.h | 14 +- .../odbc_impl/attribute_utils.h | 17 +- .../odbc_impl/encoding_utils.h | 43 +- .../odbc_impl/odbc_connection.h | 8 + .../include/odbcabstraction/spi/connection.h | 4 +- .../odbc_impl/odbc_connection.cc | 7 +- .../flight/sql/odbc/odbcabstraction/utils.cc | 4 +- .../flight/sql/odbc/tests/CMakeLists.txt | 9 +- cpp/src/arrow/flight/sql/odbc/tests/README | 4 + .../flight/sql/odbc/tests/connection_test.cc | 502 +++++++++++++++++- .../flight/sql/odbc/tests/odbc_test_suite.cc | 172 ++++++ .../flight/sql/odbc/tests/odbc_test_suite.h | 90 ++++ 31 files changed, 1290 insertions(+), 244 deletions(-) create mode 100644 cpp/src/arrow/flight/sql/odbc/README create mode 100644 cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h create mode 100644 cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc create mode 100644 cpp/src/arrow/flight/sql/odbc/tests/README create mode 100644 cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc create mode 100644 cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h diff --git 
a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt index 7be9758626f..1f7c5d1f61f 100644 --- a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt @@ -17,6 +17,16 @@ add_custom_target(arrow_flight_sql_odbc) +if(WIN32) + if(MSVC_VERSION GREATER_EQUAL 1900) + set(ODBCINST legacy_stdio_definitions odbccp32 shlwapi) + endif() +elseif(APPLE) + set(ODBCINST iodbcinst) +else() + set(ODBCINST odbcinst) +endif() + add_subdirectory(flight_sql) add_subdirectory(odbcabstraction) add_subdirectory(tests) @@ -29,16 +39,6 @@ if(WIN32) list(APPEND ARROW_FLIGHT_SQL_ODBC_SRCS odbc.def) endif() -if(WIN32) - if(MSVC_VERSION GREATER_EQUAL 1900) - set(ODBCINST legacy_stdio_definitions odbccp32 shlwapi) - endif() -elseif(APPLE) - set(ODBCINST iodbcinst) -else() - set(ODBCINST odbcinst) -endif() - add_arrow_lib(arrow_flight_sql_odbc CMAKE_PACKAGE_NAME ArrowFlightSqlOdbc diff --git a/cpp/src/arrow/flight/sql/odbc/README b/cpp/src/arrow/flight/sql/odbc/README new file mode 100644 index 00000000000..04749d9b859 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/README @@ -0,0 +1,19 @@ +Steps to Register the 64-bit Apache Arrow ODBC driver on Windows + +After the build succeeds, the ODBC DLL will be located in +`build\debug\Debug` for a debug build and `build\release\Release` for a release build. + +1. Open PowerShell as administrator. + +2. Register your ODBC DLL: + Replace `<path to repository>` with the actual path to the repository in the commands below. + + i. `cd <path to repository>` + ii. Run the script to register your ODBC DLL as the Apache Arrow Flight SQL ODBC Driver: + `.\cpp\src\arrow\flight\sql\odbc\install\install_amd64.cmd <path to repository>\cpp\build\<release | debug>\<Release | Debug>\arrow_flight_sql_odbc.dll` + Example command for reference: + `.\cpp\src\arrow\flight\sql\odbc\install\install_amd64.cmd C:\path\to\arrow\cpp\build\release\Release\arrow_flight_sql_odbc.dll` + +If the registration is successful, the Apache Arrow Flight SQL ODBC Driver +should show up as an available ODBC driver in the x64 ODBC Driver Manager.
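+
+To verify the registration from PowerShell, one quick check (a sketch, assuming
+the built-in `Get-OdbcDriver` cmdlet that ships with Windows is available):
+
+  `Get-OdbcDriver -Name "Apache Arrow Flight SQL ODBC Driver" -Platform "64-bit"`
+
+The cmdlet should print an entry for the driver if the registration succeeded.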
diff --git a/cpp/src/arrow/flight/sql/odbc/entry_points.cc b/cpp/src/arrow/flight/sql/odbc/entry_points.cc index ce91e88e053..0fc55720938 100644 --- a/cpp/src/arrow/flight/sql/odbc/entry_points.cc +++ b/cpp/src/arrow/flight/sql/odbc/entry_points.cc @@ -69,14 +69,43 @@ SQLRETURN SQL_API SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePt return arrow::SQLSetEnvAttr(env, attr, valuePtr, strLen); } -SQLRETURN SQL_API SQLDriverConnect(SQLHDBC conn, SQLHWND windowHandle, - SQLWCHAR* inConnectionString, - SQLSMALLINT inConnectionStringLen, - SQLWCHAR* outConnectionString, - SQLSMALLINT outConnectionStringBufferLen, - SQLSMALLINT* outConnectionStringLen, - SQLUSMALLINT driverCompletion) { - // TODO: implement SQLDriverConnect by linking to `odbc_impl` //-AL- TODO: create GitHub - // issue for SQLDriverConnect implementation - return SQL_INVALID_HANDLE; +SQLRETURN SQL_API SQLSetConnectAttrW(SQLHDBC conn, SQLINTEGER attr, SQLPOINTER value, + SQLINTEGER valueLen) { + // TODO implement SQLSetConnectAttr + return SQL_ERROR; } + +SQLRETURN SQL_API SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, + SQLPOINTER infoValuePtr, SQLSMALLINT bufLen, + SQLSMALLINT* length) { + return arrow::SQLGetInfoW(conn, infoType, infoValuePtr, bufLen, length); +} + +SQLRETURN SQL_API SQLGetDiagRecW(SQLSMALLINT type, SQLHANDLE handle, SQLSMALLINT recNum, + SQLWCHAR* sqlState, SQLINTEGER* nativeError, + SQLWCHAR* msgBuffer, SQLSMALLINT msgBufferLen, + SQLSMALLINT* msgLen) { + // TODO implement SQLGetDiagRecW + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, + SQLWCHAR* inConnectionString, + SQLSMALLINT inConnectionStringLen, + SQLWCHAR* outConnectionString, + SQLSMALLINT outConnectionStringBufferLen, + SQLSMALLINT* outConnectionStringLen, + SQLUSMALLINT driverCompletion) { + return arrow::SQLDriverConnectW( + conn, windowHandle, inConnectionString, inConnectionStringLen, outConnectionString, + outConnectionStringBufferLen, outConnectionStringLen, driverCompletion); +} + +SQLRETURN SQL_API SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, + SQLWCHAR* userName, SQLSMALLINT userNameLen, + SQLWCHAR* password, SQLSMALLINT passwordLen) { + return arrow::SQLConnectW(conn, dsnName, dsnNameLen, userName, userNameLen, password, + passwordLen); +} + +SQLRETURN SQL_API SQLDisconnect(SQLHDBC conn) { return arrow::SQLDisconnect(conn); } diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt index 56aabb54dbf..6985f781b9a 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt @@ -76,6 +76,8 @@ add_library(arrow_odbc_spi_impl scalar_function_reporter.h system_trust_store.cc system_trust_store.h + system_dsn.cc + system_dsn.h utils.cc) target_include_directories(arrow_odbc_spi_impl PUBLIC include include/flight_sql @@ -96,7 +98,7 @@ if(WIN32) ui/window.cc ui/dsn_configuration_window.cc ui/add_property_window.cc - system_dsn.cc) + win_system_dsn.cc) endif() target_link_libraries(arrow_odbc_spi_impl PUBLIC odbcabstraction arrow_flight_sql_shared) diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc index 4eb7d5980c2..bfd050e724b 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc @@ -38,16 +38,14 @@ std::string ReadDsnString(const 
std::string& dsn, const std::string_view& key, const std::string& dflt = "") { #define BUFFER_SIZE (1024) std::vector buf(BUFFER_SIZE); - int ret = - SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), - static_cast(buf.size()), "ODBC.INI"); + int ret = SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), + static_cast(buf.size()), "ODBC.INI"); if (ret > BUFFER_SIZE) { // If there wasn't enough space, try again with the right size buffer. buf.resize(ret + 1); - ret = - SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), - static_cast(buf.size()), "ODBC.INI"); + ret = SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), + static_cast(buf.size()), "ODBC.INI"); } return std::string(buf.data(), ret); @@ -140,11 +138,11 @@ void Configuration::LoadDsn(const std::string& dsn) { void Configuration::Clear() { this->properties.clear(); } bool Configuration::IsSet(const std::string_view& key) const { - return 0 != this->properties.count(key); + return 0 != this->properties.count(std::string(key)); } const std::string& Configuration::Get(const std::string_view& key) const { - const auto itr = this->properties.find(key); + const auto itr = this->properties.find(std::string(key)); if (itr == this->properties.cend()) { static const std::string empty(""); return empty; @@ -155,7 +153,15 @@ const std::string& Configuration::Get(const std::string_view& key) const { void Configuration::Set(const std::string_view& key, const std::string& value) { const std::string copy = boost::trim_copy(value); if (!copy.empty()) { - this->properties[key] = value; + this->properties[std::string(key)] = value; + } +} + +void Configuration::Emplace(const std::string_view& key, std::string&& value) { + const std::string copy = boost::trim_copy(value); + if (!copy.empty()) { + this->properties.emplace( + std::make_pair(std::move(std::string(key)), std::move(value))); } } @@ -167,13 +173,12 @@ const driver::odbcabstraction::Connection::ConnPropertyMap& Configuration::GetPr std::vector Configuration::GetCustomKeys() const { driver::odbcabstraction::Connection::ConnPropertyMap copyProps(properties); for (auto& key : FlightSqlConnection::ALL_KEYS) { - copyProps.erase(key); + copyProps.erase(std::string(key)); } std::vector keys; boost::copy(copyProps | boost::adaptors::map_keys, std::back_inserter(keys)); return keys; } - } // namespace config } // namespace flight_sql } // namespace driver diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc index fcf951270e6..3fcc3a87162 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc @@ -153,22 +153,22 @@ std::unique_ptr FlightSqlAuthMethod::FromProperties( const std::unique_ptr& client, const Connection::ConnPropertyMap& properties) { // Check if should use user-password authentication - auto it_user = properties.find(FlightSqlConnection::USER); + auto it_user = properties.find(std::string(FlightSqlConnection::USER)); if (it_user == properties.end()) { // The Microsoft OLE DB to ODBC bridge provider (MSDASQL) will write // "User ID" and "Password" properties instead of mapping // to ODBC compliant UID/PWD keys. 
- it_user = properties.find(FlightSqlConnection::USER_ID); + it_user = properties.find(std::string(FlightSqlConnection::USER_ID)); } - auto it_password = properties.find(FlightSqlConnection::PASSWORD); - auto it_token = properties.find(FlightSqlConnection::TOKEN); + auto it_password = properties.find(std::string(FlightSqlConnection::PASSWORD)); + auto it_token = properties.find(std::string(FlightSqlConnection::TOKEN)); if (it_user == properties.end() || it_password == properties.end()) { // Accept UID/PWD as aliases for User/Password. These are suggested as // standard properties in the documentation for SQLDriverConnect. - it_user = properties.find(FlightSqlConnection::UID); - it_password = properties.find(FlightSqlConnection::PWD); + it_user = properties.find(std::string(FlightSqlConnection::UID)); + it_password = properties.find(std::string(FlightSqlConnection::PWD)); } if (it_user != properties.end() || it_password != properties.end()) { const std::string& user = it_user != properties.end() ? it_user->second : ""; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc index 09764e5c18b..708ac2f81a4 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc @@ -111,26 +111,26 @@ inline std::string GetCerts() { return ""; } #endif -const std::set - BUILT_IN_PROPERTIES = {FlightSqlConnection::HOST, - FlightSqlConnection::PORT, - FlightSqlConnection::USER, - FlightSqlConnection::USER_ID, - FlightSqlConnection::UID, - FlightSqlConnection::PASSWORD, - FlightSqlConnection::PWD, - FlightSqlConnection::TOKEN, - FlightSqlConnection::USE_ENCRYPTION, - FlightSqlConnection::DISABLE_CERTIFICATE_VERIFICATION, - FlightSqlConnection::TRUSTED_CERTS, - FlightSqlConnection::USE_SYSTEM_TRUST_STORE, - FlightSqlConnection::STRING_COLUMN_LENGTH, - FlightSqlConnection::USE_WIDE_CHAR}; +const std::set BUILT_IN_PROPERTIES = { + FlightSqlConnection::HOST, + FlightSqlConnection::PORT, + FlightSqlConnection::USER, + FlightSqlConnection::USER_ID, + FlightSqlConnection::UID, + FlightSqlConnection::PASSWORD, + FlightSqlConnection::PWD, + FlightSqlConnection::TOKEN, + FlightSqlConnection::USE_ENCRYPTION, + FlightSqlConnection::DISABLE_CERTIFICATE_VERIFICATION, + FlightSqlConnection::TRUSTED_CERTS, + FlightSqlConnection::USE_SYSTEM_TRUST_STORE, + FlightSqlConnection::STRING_COLUMN_LENGTH, + FlightSqlConnection::USE_WIDE_CHAR}; Connection::ConnPropertyMap::const_iterator TrackMissingRequiredProperty( const std::string_view& property, const Connection::ConnPropertyMap& properties, std::vector& missing_attr) { - auto prop_iter = properties.find(property); + auto prop_iter = properties.find(std::string(property)); if (properties.end() == prop_iter) { missing_attr.push_back(property); } @@ -149,7 +149,8 @@ std::shared_ptr LoadFlightSslConfigs( AsBool(connPropertyMap, FlightSqlConnection::USE_SYSTEM_TRUST_STORE) .value_or(SYSTEM_TRUST_STORE_DEFAULT); - auto trusted_certs_iterator = connPropertyMap.find(FlightSqlConnection::TRUSTED_CERTS); + auto trusted_certs_iterator = + connPropertyMap.find(std::string(FlightSqlConnection::TRUSTED_CERTS)); auto trusted_certs = trusted_certs_iterator != connPropertyMap.end() ? 
trusted_certs_iterator->second : ""; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h index 0ee6d5d5391..0a4b213229f 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h @@ -29,6 +29,13 @@ namespace driver { namespace flight_sql { +/// \brief Case insensitive comparator that takes string_view +struct CaseInsensitiveComparatorStrView { + bool operator()(const std::string_view& s1, const std::string_view& s2) const { + return boost::lexicographical_compare(s1, s2, boost::is_iless()); + } +}; + class FlightSqlSslConfig; /// \brief Create an instance of the FlightSqlSslConfig class, from the properties passed diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection_test.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection_test.cc index 6a519138b63..a7a0fc10c29 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection_test.cc @@ -69,10 +69,12 @@ TEST(MetadataSettingsTest, StringColumnLengthTest) { const int32_t expected_string_column_length = 100000; const Connection::ConnPropertyMap properties = { - {FlightSqlConnection::HOST, std::string("localhost")}, // expect not used - {FlightSqlConnection::PORT, std::string("32010")}, // expect not used - {FlightSqlConnection::USE_ENCRYPTION, std::string("false")}, // expect not used - {FlightSqlConnection::STRING_COLUMN_LENGTH, + {std::string(FlightSqlConnection::HOST), + std::string("localhost")}, // expect not used + {std::string(FlightSqlConnection::PORT), std::string("32010")}, // expect not used + {std::string(FlightSqlConnection::USE_ENCRYPTION), + std::string("false")}, // expect not used + {std::string(FlightSqlConnection::STRING_COLUMN_LENGTH), std::to_string(expected_string_column_length)}, }; @@ -90,10 +92,10 @@ TEST(MetadataSettingsTest, UseWideCharTest) { connection.SetClosed(false); const Connection::ConnPropertyMap properties1 = { - {FlightSqlConnection::USE_WIDE_CHAR, std::string("true")}, + {std::string(FlightSqlConnection::USE_WIDE_CHAR), std::string("true")}, }; const Connection::ConnPropertyMap properties2 = { - {FlightSqlConnection::USE_WIDE_CHAR, std::string("false")}, + {std::string(FlightSqlConnection::USE_WIDE_CHAR), std::string("false")}, }; EXPECT_EQ(true, connection.GetUseWideChar(properties1)); @@ -105,9 +107,9 @@ TEST(MetadataSettingsTest, UseWideCharTest) { TEST(BuildLocationTests, ForTcp) { std::vector missing_attr; Connection::ConnPropertyMap properties = { - {FlightSqlConnection::HOST, std::string("localhost")}, - {FlightSqlConnection::PORT, std::string("32010")}, - {FlightSqlConnection::USE_ENCRYPTION, std::string("false")}, + {std::string(FlightSqlConnection::HOST), std::string("localhost")}, + {std::string(FlightSqlConnection::PORT), std::string("32010")}, + {std::string(FlightSqlConnection::USE_ENCRYPTION), std::string("false")}, }; const std::shared_ptr& ssl_config = @@ -117,8 +119,8 @@ TEST(BuildLocationTests, ForTcp) { FlightSqlConnection::BuildLocation(properties, missing_attr, ssl_config); const Location& actual_location2 = FlightSqlConnection::BuildLocation( { - {FlightSqlConnection::HOST, std::string("localhost")}, - {FlightSqlConnection::PORT, std::string("32011")}, + {std::string(FlightSqlConnection::HOST), std::string("localhost")}, + {std::string(FlightSqlConnection::PORT), 
std::string("32011")}, }, missing_attr, ssl_config); @@ -131,9 +133,9 @@ TEST(BuildLocationTests, ForTcp) { TEST(BuildLocationTests, ForTls) { std::vector missing_attr; Connection::ConnPropertyMap properties = { - {FlightSqlConnection::HOST, std::string("localhost")}, - {FlightSqlConnection::PORT, std::string("32010")}, - {FlightSqlConnection::USE_ENCRYPTION, std::string("1")}, + {std::string(FlightSqlConnection::HOST), std::string("localhost")}, + {std::string(FlightSqlConnection::PORT), std::string("32010")}, + {std::string(FlightSqlConnection::USE_ENCRYPTION), std::string("1")}, }; const std::shared_ptr& ssl_config = @@ -143,9 +145,9 @@ TEST(BuildLocationTests, ForTls) { FlightSqlConnection::BuildLocation(properties, missing_attr, ssl_config); Connection::ConnPropertyMap second_properties = { - {FlightSqlConnection::HOST, std::string("localhost")}, - {FlightSqlConnection::PORT, std::string("32011")}, - {FlightSqlConnection::USE_ENCRYPTION, std::string("1")}, + {std::string(FlightSqlConnection::HOST), std::string("localhost")}, + {std::string(FlightSqlConnection::PORT), std::string("32011")}, + {std::string(FlightSqlConnection::USE_ENCRYPTION), std::string("1")}, }; const std::shared_ptr& second_ssl_config = diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc index 1949d2f15ad..61a11252380 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc @@ -66,7 +66,7 @@ void FlightSqlDriver::RegisterLog() { odbcabstraction::PropertyMap propertyMap; driver::odbcabstraction::ReadConfigFile(propertyMap, CONFIG_FILE_NAME); - auto log_enable_iterator = propertyMap.find(SPDLogger::LOG_ENABLED); + auto log_enable_iterator = propertyMap.find(std::string(SPDLogger::LOG_ENABLED)); auto log_enabled = log_enable_iterator != propertyMap.end() ? odbcabstraction::AsBool(log_enable_iterator->second) : false; @@ -74,13 +74,13 @@ void FlightSqlDriver::RegisterLog() { return; } - auto log_path_iterator = propertyMap.find(SPDLogger::LOG_PATH); + auto log_path_iterator = propertyMap.find(std::string(SPDLogger::LOG_PATH)); auto log_path = log_path_iterator != propertyMap.end() ? log_path_iterator->second : ""; if (log_path.empty()) { return; } - auto log_level_iterator = propertyMap.find(SPDLogger::LOG_LEVEL); + auto log_level_iterator = propertyMap.find(std::string(SPDLogger::LOG_LEVEL)); auto log_level = ToLogLevel(log_level_iterator != propertyMap.end() ? std::stoi(log_level_iterator->second) : 1); @@ -88,12 +88,14 @@ void FlightSqlDriver::RegisterLog() { return; } - auto maximum_file_size_iterator = propertyMap.find(SPDLogger::MAXIMUM_FILE_SIZE); + auto maximum_file_size_iterator = + propertyMap.find(std::string(SPDLogger::MAXIMUM_FILE_SIZE)); auto maximum_file_size = maximum_file_size_iterator != propertyMap.end() ? std::stoi(maximum_file_size_iterator->second) : DEFAULT_MAXIMUM_FILE_SIZE; - auto maximum_file_quantity_iterator = propertyMap.find(SPDLogger::FILE_QUANTITY); + auto maximum_file_quantity_iterator = + propertyMap.find(std::string(SPDLogger::FILE_QUANTITY)); auto maximum_file_quantity = maximum_file_quantity_iterator != propertyMap.end() ? 
std::stoi(maximum_file_quantity_iterator->second) : 1; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_get_tables_reader.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_get_tables_reader.cc index ccd6058f8cd..b048d1984c5 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_get_tables_reader.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_get_tables_reader.cc @@ -80,9 +80,10 @@ std::shared_ptr<Schema> GetTablesReader::GetSchema() { const arrow::Result<std::shared_ptr<Schema>>& result = arrow::ipc::ReadSchema(&dataset_schema_reader, &in_memo); if (!result.ok()) { - // TODO: Ignoring this error until we fix the problem on Dremio server - // The problem is that complex types columns are being returned without the children - // types. + // TODO: Test and build the driver against a server that returns + // complex type columns with their children types, and handle the + // failure properly. + // https://github.com/apache/arrow/issues/46561 return nullptr; } diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_statement_get_columns.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_statement_get_columns.cc index 0e250d1af9b..d3250401193 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_statement_get_columns.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_statement_get_columns.cc @@ -98,10 +98,10 @@ Result<std::shared_ptr<RecordBatch>> Transform_inner( const auto& table_name = reader.GetTableName(); const std::shared_ptr<Schema>& schema = reader.GetSchema(); if (schema == nullptr) { - // TODO: Remove this if after fixing TODO on GetTablesReader::GetSchema() - // This is because of a problem on Dremio server, where complex types columns - // are being returned without the children types, so we are simply ignoring - // it by now. + // TODO: Test and build the driver against a server that returns + // complex type columns with their children types, and handle the + // failure properly. + // https://github.com/apache/arrow/issues/46561 continue; } for (int i = 0; i < schema->num_fields(); ++i) { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h index 69fa8a8696c..c7c9cc5b894 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h @@ -46,13 +46,6 @@ class Configuration { */ ~Configuration(); - /** - * Convert configure to connect string. - * - * @return Connect string. - */ - std::string ToConnectString() const; - void LoadDefaults(); void LoadDsn(const std::string& dsn); @@ -60,7 +53,7 @@ bool IsSet(const std::string_view& key) const; const std::string& Get(const std::string_view& key) const; void Set(const std::string_view& key, const std::string& value); - + void Emplace(const std::string_view& key, std::string&& value); /** * Get properties map.
*/ diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/main.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/main.cc index e112fdf67c0..aaf267cc268 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/main.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/main.cc @@ -43,7 +43,7 @@ using driver::odbcabstraction::Statement; void TestBindColumn(const std::shared_ptr& connection) { const std::shared_ptr& statement = connection->CreateStatement(); - statement->Execute("SELECT IncidntNum, Category FROM \"@dremio\".Test LIMIT 10"); + statement->Execute("SELECT IncidntNum, Category FROM \"@apache\".Test LIMIT 10"); const std::shared_ptr& result_set = statement->GetResultSet(); @@ -105,7 +105,7 @@ void TestBindColumnBigInt(const std::shared_ptr& connection) { " SELECT CONVERT_TO_INTEGER(IncidntNum, 1, 1, 0) AS IncidntNum, " "Category\n" " FROM (\n" - " SELECT IncidntNum, Category FROM \"@dremio\".Test LIMIT 10\n" + " SELECT IncidntNum, Category FROM \"@apache\".Test LIMIT 10\n" " ) nested_0\n" ") nested_0"); @@ -202,11 +202,11 @@ int main() { driver.CreateConnection(driver::odbcabstraction::V_3); Connection::ConnPropertyMap properties = { - {FlightSqlConnection::HOST, std::string("automaster.drem.io")}, - {FlightSqlConnection::PORT, std::string("32010")}, - {FlightSqlConnection::USER, std::string("dremio")}, - {FlightSqlConnection::PASSWORD, std::string("dremio123")}, - {FlightSqlConnection::USE_ENCRYPTION, std::string("false")}, + {std::string(FlightSqlConnection::HOST), std::string("automaster.apache")}, + {std::string(FlightSqlConnection::PORT), std::string("32010")}, + {std::string(FlightSqlConnection::USER), std::string("apache")}, + {std::string(FlightSqlConnection::PASSWORD), std::string("apache123")}, + {std::string(FlightSqlConnection::USE_ENCRYPTION), std::string("false")}, }; std::vector missing_attr; connection->Connect(properties, missing_attr); diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc index 67a4c3db3d3..95b47bdb1e2 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc @@ -15,63 +15,16 @@ // specific language governing permissions and limitations // under the License. 
-// platform.h includes windows.h, so it needs to be included -// before winuser.h -#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" +#include "arrow/flight/sql/odbc/flight_sql/system_dsn.h" -#include -#include #include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" -#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/connection_string_parser.h" -#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/dsn_configuration_window.h" -#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/window.h" -#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" #include -#include -#include #include using driver::flight_sql::FlightSqlConnection; using driver::flight_sql::config::Configuration; -using driver::flight_sql::config::ConnectionStringParser; -using driver::flight_sql::config::DsnConfigurationWindow; -using driver::flight_sql::config::Result; -using driver::flight_sql::config::Window; - -BOOL CALLBACK ConfigDriver(HWND hwndParent, WORD fRequest, LPCSTR lpszDriver, - LPCSTR lpszArgs, LPSTR lpszMsg, WORD cbMsgMax, - WORD* pcbMsgOut) { - return false; -} - -bool DisplayConnectionWindow(void* windowParent, Configuration& config) { - HWND hwndParent = (HWND)windowParent; - - if (!hwndParent) return true; - - try { - Window parent(hwndParent); - DsnConfigurationWindow window(&parent, config); - - window.Create(); - - window.Show(); - window.Update(); - - return ProcessMessages(window) == Result::OK; - } catch (driver::odbcabstraction::DriverException& err) { - std::stringstream buf; - buf << "Message: " << err.GetMessageText() << ", Code: " << err.GetNativeError(); - std::string message = buf.str(); - MessageBox(NULL, message.c_str(), "Error!", MB_ICONEXCLAMATION | MB_OK); - - SQLPostInstallerError(err.GetNativeError(), err.GetMessageText().c_str()); - } - - return false; -} void PostLastInstallerError() { #define BUFFER_SIZE (1024) @@ -134,45 +87,3 @@ bool RegisterDsn(const Configuration& config, LPCSTR driver) { return true; } - -BOOL INSTAPI ConfigDSN(HWND hwndParent, WORD req, LPCSTR driver, LPCSTR attributes) { - Configuration config; - ConnectionStringParser parser(config); - parser.ParseConfigAttributes(attributes); - - switch (req) { - case ODBC_ADD_DSN: { - config.LoadDefaults(); - if (!DisplayConnectionWindow(hwndParent, config) || !RegisterDsn(config, driver)) - return FALSE; - - break; - } - - case ODBC_CONFIG_DSN: { - const std::string& dsn = config.Get(FlightSqlConnection::DSN); - if (!SQLValidDSN(dsn.c_str())) return FALSE; - - Configuration loaded(config); - loaded.LoadDsn(dsn); - - if (!DisplayConnectionWindow(hwndParent, loaded) || !UnregisterDsn(dsn.c_str()) || - !RegisterDsn(loaded, driver)) - return FALSE; - - break; - } - - case ODBC_REMOVE_DSN: { - const std::string& dsn = config.Get(FlightSqlConnection::DSN); - if (!SQLValidDSN(dsn.c_str()) || !UnregisterDsn(dsn)) return FALSE; - - break; - } - - default: - return FALSE; - } - - return TRUE; -} diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h new file mode 100644 index 00000000000..535a063269e --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h @@ -0,0 +1,51 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. 
See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// platform.h includes windows.h, so it needs to be included first +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" + +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" + +using driver::flight_sql::config::Configuration; + +#if defined _WIN32 || defined _WIN64 +/** + * Display connection window for user to configure connection parameters. + * + * @param windowParent Parent window handle. + * @param config Output configuration. + * @return True on success and false on fail. + */ +bool DisplayConnectionWindow(void* windowParent, Configuration& config); +#endif + +/** + * Register DSN with specified configuration. + * + * @param config Configuration. + * @param driver Driver. + * @return True on success and false on fail. + */ +bool RegisterDsn(const Configuration& config, LPCSTR driver); + +/** + * Unregister specified DSN. + * + * @param dsn DSN name. + * @return True on success and false on fail. + */ +bool UnregisterDsn(const std::string& dsn); diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc new file mode 100644 index 00000000000..a2bf2565610 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc @@ -0,0 +1,119 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
+ +// platform.h includes windows.h, so it needs to be included +// before winuser.h +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" + +#include +#include + +#include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/connection_string_parser.h" +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/dsn_configuration_window.h" +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/window.h" +#include "arrow/flight/sql/odbc/flight_sql/system_dsn.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" + +#include +#include +#include +#include + +using driver::flight_sql::FlightSqlConnection; +using driver::flight_sql::config::Configuration; +using driver::flight_sql::config::ConnectionStringParser; +using driver::flight_sql::config::DsnConfigurationWindow; +using driver::flight_sql::config::Result; +using driver::flight_sql::config::Window; + +BOOL CALLBACK ConfigDriver(HWND hwndParent, WORD fRequest, LPCSTR lpszDriver, + LPCSTR lpszArgs, LPSTR lpszMsg, WORD cbMsgMax, + WORD* pcbMsgOut) { + return false; +} + +bool DisplayConnectionWindow(void* windowParent, Configuration& config) { + HWND hwndParent = (HWND)windowParent; + + if (!hwndParent) return true; + + try { + Window parent(hwndParent); + DsnConfigurationWindow window(&parent, config); + + window.Create(); + + window.Show(); + window.Update(); + + return ProcessMessages(window) == Result::OK; + } catch (const driver::odbcabstraction::DriverException& err) { + std::stringstream buf; + buf << "SQL State: " << err.GetSqlState() << ", Message: " << err.GetMessageText() + << ", Code: " << err.GetNativeError(); + std::string message = buf.str(); + MessageBox(NULL, message.c_str(), "Error!", MB_ICONEXCLAMATION | MB_OK); + + SQLPostInstallerError(err.GetNativeError(), err.GetMessageText().c_str()); + } + + return false; +} + +BOOL INSTAPI ConfigDSN(HWND hwndParent, WORD req, LPCSTR driver, LPCSTR attributes) { + Configuration config; + ConnectionStringParser parser(config); + parser.ParseConfigAttributes(attributes); + + switch (req) { + case ODBC_ADD_DSN: { + config.LoadDefaults(); + if (!DisplayConnectionWindow(hwndParent, config) || !RegisterDsn(config, driver)) + return FALSE; + + break; + } + + case ODBC_CONFIG_DSN: { + const std::string& dsn = config.Get(FlightSqlConnection::DSN); + if (!SQLValidDSN(dsn.c_str())) return FALSE; + + Configuration loaded(config); + loaded.LoadDsn(dsn); + + if (!DisplayConnectionWindow(hwndParent, loaded) || !UnregisterDsn(dsn.c_str()) || + !RegisterDsn(loaded, driver)) + return FALSE; + + break; + } + + case ODBC_REMOVE_DSN: { + const std::string& dsn = config.Get(FlightSqlConnection::DSN); + if (!SQLValidDSN(dsn.c_str()) || !UnregisterDsn(dsn)) return FALSE; + + break; + } + + default: + return FALSE; + } + + return TRUE; +} diff --git a/cpp/src/arrow/flight/sql/odbc/odbc.def b/cpp/src/arrow/flight/sql/odbc/odbc.def index 2c93d183c92..dba68425f49 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc.def +++ b/cpp/src/arrow/flight/sql/odbc/odbc.def @@ -1,6 +1,6 @@ - LIBRARY arrow_flight_sql_odbc EXPORTS + ConfigDSN SQLAllocConnect SQLAllocEnv SQLAllocHandle @@ -38,4 +38,3 @@ EXPORTS SQLSetEnvAttr SQLSetStmtAttrW SQLTablesW - diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index a57a371044e..9d2df40c05a 100644 
--- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -15,20 +15,30 @@ // specific language governing permissions and limitations // under the License. +// flight_sql_connection.h needs to be included first due to conflicts with windows.h +#include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" + #include #include -#include +#include #include #include #include +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h" + +#if defined _WIN32 || defined _WIN64 +// For displaying DSN Window +# include "arrow/flight/sql/odbc/flight_sql/system_dsn.h" +#endif + // odbc_api includes windows.h, which needs to be put behind winsock2.h. // odbc_environment.h includes winsock2.h #include namespace arrow { SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result) { - // TODO: implement SQLAllocHandle by linking to `odbc_impl` *result = nullptr; switch (type) { @@ -103,11 +113,13 @@ SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) { ODBCConnection* conn = reinterpret_cast(handle); - return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { - conn->releaseConnection(); + if (!conn) { + return SQL_INVALID_HANDLE; + } - return SQL_SUCCESS; - }); + conn->releaseConnection(); + + return SQL_SUCCESS; } case SQL_HANDLE_STMT: @@ -327,4 +339,117 @@ SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, } }); } + +SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, + SQLWCHAR* inConnectionString, + SQLSMALLINT inConnectionStringLen, + SQLWCHAR* outConnectionString, + SQLSMALLINT outConnectionStringBufferLen, + SQLSMALLINT* outConnectionStringLen, + SQLUSMALLINT driverCompletion) { + // TODO: Implement FILEDSN and SAVEFILE keywords according to the spec + // https://github.com/apache/arrow/issues/46449 + + // TODO: Copy connection string properly in SQLDriverConnectW according to the + // spec https://github.com/apache/arrow/issues/46560 + + using driver::odbcabstraction::Connection; + using ODBC::ODBCConnection; + + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { + ODBCConnection* connection = reinterpret_cast(conn); + std::string connection_string = + ODBC::SqlWcharToString(inConnectionString, inConnectionStringLen); + Connection::ConnPropertyMap properties; + std::string dsn = + ODBCConnection::getPropertiesFromConnString(connection_string, properties); + + std::vector missing_properties; + + // TODO: Implement SQL_DRIVER_COMPLETE_REQUIRED in SQLDriverConnectW according to the + // spec https://github.com/apache/arrow/issues/46448 +#if defined _WIN32 || defined _WIN64 + if (driverCompletion == SQL_DRIVER_PROMPT || + ((driverCompletion == SQL_DRIVER_COMPLETE || + driverCompletion == SQL_DRIVER_COMPLETE_REQUIRED) && + !missing_properties.empty())) { + // TODO: implement driverCompletion behavior to display connection window. 
+ } +#endif + + connection->connect(dsn, properties, missing_properties); + + // Copy connection string to outConnectionString after connection attempt + return ODBC::GetStringAttribute(true, connection_string, true, outConnectionString, + outConnectionStringBufferLen, outConnectionStringLen, + connection->GetDiagnostics()); + }); +} + +SQLRETURN SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, + SQLWCHAR* userName, SQLSMALLINT userNameLen, SQLWCHAR* password, + SQLSMALLINT passwordLen) { + using driver::flight_sql::FlightSqlConnection; + using driver::flight_sql::config::Configuration; + using ODBC::ODBCConnection; + + using ODBC::SqlWcharToString; + + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { + ODBCConnection* connection = reinterpret_cast(conn); + std::string dsn = SqlWcharToString(dsnName, dsnNameLen); + + Configuration config; + config.LoadDsn(dsn); + + if (userName) { + std::string uid = SqlWcharToString(userName, userNameLen); + config.Emplace(FlightSqlConnection::UID, std::move(uid)); + } + + if (password) { + std::string pwd = SqlWcharToString(password, passwordLen); + config.Emplace(FlightSqlConnection::PWD, std::move(pwd)); + } + + std::vector missing_properties; + + connection->connect(dsn, config.GetProperties(), missing_properties); + + return SQL_SUCCESS; + }); +} + +SQLRETURN SQLDisconnect(SQLHDBC conn) { + using ODBC::ODBCConnection; + + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { + ODBCConnection* connection = reinterpret_cast(conn); + + connection->disconnect(); + + return SQL_SUCCESS; + }); +} + +SQLRETURN SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValuePtr, + SQLSMALLINT bufLen, SQLSMALLINT* length) { + // TODO: complete implementation of SQLGetInfoW and write tests + using ODBC::ODBCConnection; + + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { + ODBCConnection* connection = reinterpret_cast(conn); + + // Partially stubbed implementation of SQLGetInfoW + if (infoType == SQL_DRIVER_ODBC_VER) { + std::string_view ver("03.80"); + + return ODBC::GetStringAttribute(true, ver, true, infoValuePtr, bufLen, length, + connection->GetDiagnostics()); + } + + return static_cast(SQL_ERROR); + }); +} + } // namespace arrow diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.h b/cpp/src/arrow/flight/sql/odbc/odbc_api.h index 6c204fe3ae3..14bbddaa3ce 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.h +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.h @@ -28,7 +28,6 @@ // @file odbc_api.h // // Define internal ODBC API function headers. 
- namespace arrow { SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result); SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle); @@ -40,4 +39,17 @@ SQLRETURN SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, SQLINTEGER bufferLen, SQLINTEGER* strLenPtr); SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, SQLINTEGER strLen); +SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, + SQLWCHAR* inConnectionString, + SQLSMALLINT inConnectionStringLen, + SQLWCHAR* outConnectionString, + SQLSMALLINT outConnectionStringBufferLen, + SQLSMALLINT* outConnectionStringLen, + SQLUSMALLINT driverCompletion); +SQLRETURN SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, + SQLWCHAR* userName, SQLSMALLINT userNameLen, SQLWCHAR* password, + SQLSMALLINT passwordLen); +SQLRETURN SQLDisconnect(SQLHDBC conn); +SQLRETURN SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValuePtr, + SQLSMALLINT bufLen, SQLSMALLINT* length); } // namespace arrow diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h index 7b3b457a35f..7cf52b6cc1d 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h @@ -45,12 +45,12 @@ inline void GetAttribute(T attributeValue, SQLPOINTER output, O outputSize, } template -inline SQLRETURN GetAttributeUTF8(const std::string& attributeValue, SQLPOINTER output, - O outputSize, O* outputLenPtr) { +inline SQLRETURN GetAttributeUTF8(const std::string_view& attributeValue, + SQLPOINTER output, O outputSize, O* outputLenPtr) { if (output) { size_t outputLenBeforeNul = std::min(static_cast(attributeValue.size()), static_cast(outputSize - 1)); - memcpy(output, attributeValue.c_str(), outputLenBeforeNul); + memcpy(output, attributeValue.data(), outputLenBeforeNul); reinterpret_cast(output)[outputLenBeforeNul] = '\0'; } @@ -65,8 +65,8 @@ inline SQLRETURN GetAttributeUTF8(const std::string& attributeValue, SQLPOINTER } template -inline SQLRETURN GetAttributeUTF8(const std::string& attributeValue, SQLPOINTER output, - O outputSize, O* outputLenPtr, +inline SQLRETURN GetAttributeUTF8(const std::string_view& attributeValue, + SQLPOINTER output, O outputSize, O* outputLenPtr, driver::odbcabstraction::Diagnostics& diagnostics) { SQLRETURN result = GetAttributeUTF8(attributeValue, output, outputSize, outputLenPtr); if (SQL_SUCCESS_WITH_INFO == result) { @@ -76,7 +76,7 @@ inline SQLRETURN GetAttributeUTF8(const std::string& attributeValue, SQLPOINTER } template -inline SQLRETURN GetAttributeSQLWCHAR(const std::string& attributeValue, +inline SQLRETURN GetAttributeSQLWCHAR(const std::string_view& attributeValue, bool isLengthInBytes, SQLPOINTER output, O outputSize, O* outputLenPtr) { size_t result = @@ -95,7 +95,7 @@ inline SQLRETURN GetAttributeSQLWCHAR(const std::string& attributeValue, } template -inline SQLRETURN GetAttributeSQLWCHAR(const std::string& attributeValue, +inline SQLRETURN GetAttributeSQLWCHAR(const std::string_view& attributeValue, bool isLengthInBytes, SQLPOINTER output, O outputSize, O* outputLenPtr, driver::odbcabstraction::Diagnostics& diagnostics) { @@ -108,7 +108,8 @@ inline SQLRETURN GetAttributeSQLWCHAR(const std::string& attributeValue, } template -inline SQLRETURN 
GetStringAttribute(bool isUnicode, const std::string& attributeValue, +inline SQLRETURN GetStringAttribute(bool isUnicode, + const std::string_view& attributeValue, bool isLengthInBytes, SQLPOINTER output, O outputSize, O* outputLenPtr, driver::odbcabstraction::Diagnostics& diagnostics) { diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h index 01eae6f059a..23d9ca3e6d4 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h @@ -19,6 +19,9 @@ #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/encoding.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/encoding.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" + #include #include #include @@ -34,10 +37,11 @@ namespace ODBC { using driver::odbcabstraction::DriverException; using driver::odbcabstraction::GetSqlWCharSize; using driver::odbcabstraction::Utf8ToWcs; +using driver::odbcabstraction::WcsToUtf8; // Return the number of bytes required for the conversion. template -inline size_t ConvertToSqlWChar(const std::string& str, SQLWCHAR* buffer, +inline size_t ConvertToSqlWChar(const std::string_view& str, SQLWCHAR* buffer, SQLLEN bufferSizeInBytes) { thread_local std::vector wstr; Utf8ToWcs(str.data(), str.size(), &wstr); @@ -63,7 +67,7 @@ inline size_t ConvertToSqlWChar(const std::string& str, SQLWCHAR* buffer, return valueLengthInBytes; } -inline size_t ConvertToSqlWChar(const std::string& str, SQLWCHAR* buffer, +inline size_t ConvertToSqlWChar(const std::string_view& str, SQLWCHAR* buffer, SQLLEN bufferSizeInBytes) { switch (GetSqlWCharSize()) { case sizeof(char16_t): @@ -77,4 +81,39 @@ inline size_t ConvertToSqlWChar(const std::string& str, SQLWCHAR* buffer, } } +/// \brief Convert buffer of SqlWchar to standard string +/// \param[in] wchar_msg SqlWchar to convert +/// \param[in] msg_len Number of characters in wchar_msg +/// \return wchar_msg in std::string format +inline std::string SqlWcharToString(SQLWCHAR* wchar_msg, SQLSMALLINT msg_len = SQL_NTS) { + if (wchar_msg == nullptr) { + return std::string(); + } + + thread_local std::vector utf8_str; + + if (msg_len == SQL_NTS) { + WcsToUtf8((void*)wchar_msg, &utf8_str); + } else { + WcsToUtf8((void*)wchar_msg, msg_len, &utf8_str); + } + + return std::string(utf8_str.begin(), utf8_str.end()); +} + +inline std::string SqlStringToString(const unsigned char* sqlStr, + int32_t sqlStrLen = SQL_NTS) { + std::string res; + + const char* sqlStrC = reinterpret_cast(sqlStr); + + if (!sqlStr) return res; + + if (sqlStrLen == SQL_NTS) + res.assign(sqlStrC); + else if (sqlStrLen > 0) + res.assign(sqlStrC, sqlStrLen); + + return res; +} } // namespace ODBC diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h index e771f467e6e..966110502f8 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h @@ -41,6 +41,9 @@ class 
ODBCConnection : public ODBCHandle { ODBCConnection(const ODBCConnection&) = delete; ODBCConnection& operator=(const ODBCConnection&) = delete; + /// \brief Constructor for ODBCConnection. + /// \param[in] environment the parent environment. + /// \param[in] spiConnection the underlying spi connection. ODBCConnection(ODBCEnvironment& environment, std::shared_ptr spiConnection); @@ -48,6 +51,11 @@ class ODBCConnection : public ODBCHandle { const std::string& GetDSN() const; bool isConnected() const; + + /// \brief Connect to Arrow Flight SQL server. + /// \param[in] dsn the dsn name. + /// \param[in] properties the connection property map extracted from connection string. + /// \param[out] missing_properties report the properties that are missing void connect(std::string dsn, const driver::odbcabstraction::Connection::ConnPropertyMap& properties, std::vector& missing_properties); diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h index 64b7e6a724b..ce86882c952 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h @@ -33,13 +33,13 @@ namespace odbcabstraction { /// \brief Case insensitive comparator struct CaseInsensitiveComparator { - bool operator()(const std::string_view& s1, const std::string_view& s2) const { + bool operator()(const std::string& s1, const std::string& s2) const { return boost::lexicographical_compare(s1, s2, boost::is_iless()); } }; // PropertyMap is case-insensitive for keys. -typedef std::map PropertyMap; +typedef std::map PropertyMap; class Statement; diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc index 4d5d4dc3656..f28ee1789b0 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc @@ -85,11 +85,11 @@ void loadPropertiesFromDSN(const std::string& dsn, for (auto& key : keys) { outputBuffer.clear(); outputBuffer.resize(BUFFER_SIZE, '\0'); - SQLGetPrivateProfileString(dsn.c_str(), key.data(), "", &outputBuffer[0], - BUFFER_SIZE, "odbc.ini"); + SQLGetPrivateProfileString(dsn.c_str(), key.data(), "", &outputBuffer[0], BUFFER_SIZE, + "odbc.ini"); std::string value = std::string(&outputBuffer[0]); - auto propIter = properties.find(key); + auto propIter = properties.find(std::string(key)); if (propIter == properties.end()) { properties.emplace(std::make_pair(std::move(key), std::move(value))); } @@ -759,7 +759,6 @@ std::string ODBCConnection::getPropertiesFromConnString( if (!isDsnFirst) { isDriverFirst = true; } - continue; } // Strip wrapping curly braces. 
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc index f1d2d14744d..45dfbcf2e42 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc @@ -42,7 +42,7 @@ boost::optional AsBool(const std::string& value) { boost::optional AsBool(const Connection::ConnPropertyMap& connPropertyMap, const std::string_view& property_name) { - auto extracted_property = connPropertyMap.find(property_name); + auto extracted_property = connPropertyMap.find(std::string(property_name)); if (extracted_property != connPropertyMap.end()) { return AsBool(extracted_property->second); @@ -54,7 +54,7 @@ boost::optional AsBool(const Connection::ConnPropertyMap& connPropertyMap, boost::optional AsInt32(int32_t min_value, const Connection::ConnPropertyMap& connPropertyMap, const std::string_view& property_name) { - auto extracted_property = connPropertyMap.find(property_name); + auto extracted_property = connPropertyMap.find(std::string(property_name)); if (extracted_property != connPropertyMap.end()) { const int32_t stringColumnLength = std::stoi(extracted_property->second); diff --git a/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt index 161669b41df..1d0dce0bec4 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt @@ -19,8 +19,15 @@ add_custom_target(tests) include_directories(${ODBC_INCLUDE_DIRS}) +add_definitions(-DUNICODE=1) + add_arrow_test(connection_test SOURCES connection_test.cc + odbc_test_suite.cc + odbc_test_suite.h EXTRA_LINK_LIBS - ${ODBC_LIBRARIES}) + ${ODBC_LIBRARIES} + ${ODBCINST} + arrow_odbc_spi_impl + odbcabstraction) diff --git a/cpp/src/arrow/flight/sql/odbc/tests/README b/cpp/src/arrow/flight/sql/odbc/tests/README new file mode 100644 index 00000000000..8e43296edff --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/tests/README @@ -0,0 +1,4 @@ +Prior to running the tests, set environment variable `ARROW_FLIGHT_SQL_ODBC_CONN` +to a valid connection string. +A valid connection string looks like: +driver={Apache Arrow Flight SQL ODBC Driver};HOST=localhost;port=32010;pwd=myPassword;uid=myName;useEncryption=false; diff --git a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc index 7991ec40263..c6a39ccc361 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc @@ -14,6 +14,7 @@ // KIND, either express or implied. See the License for the // specific language governing permissions and limitations // under the License. 
+#include "arrow/flight/sql/odbc/tests/odbc_test_suite.h" #ifdef _WIN32 # include @@ -22,13 +23,13 @@ #include #include #include + #include "gtest/gtest.h" namespace arrow { namespace flight { namespace odbc { namespace integration_tests { - TEST(SQLAllocHandle, TestSQLAllocHandleEnv) { // ODBC Environment SQLHENV env; @@ -199,56 +200,49 @@ TEST(SQLSetEnvAttr, TestSQLSetEnvAttrODBCVersionInvalid) { EXPECT_TRUE(return_set == SQL_ERROR); } -TEST(SQLGetEnvAttr, TestSQLGetEnvAttrOutputNTS) { - // ODBC Environment - SQLHENV env; +TEST_F(FlightSQLODBCTestBase, TestSQLGetEnvAttrOutputNTS) { + connect(); SQLINTEGER output_nts; - // Allocate an environment handle - SQLRETURN return_env = SQLAllocEnv(&env); - - EXPECT_TRUE(return_env == SQL_SUCCESS); - SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_OUTPUT_NTS, &output_nts, 0, 0); EXPECT_TRUE(return_get == SQL_SUCCESS); EXPECT_EQ(output_nts, SQL_TRUE); + + disconnect(); } -TEST(SQLGetEnvAttr, TestSQLGetEnvAttrGetLength) { +TEST_F(FlightSQLODBCTestBase, TestSQLGetEnvAttrGetLength) { + // Test is disabled because call to SQLGetEnvAttr is handled by the driver manager on + // Windows. This test case can be potentionally used on macOS/Linux GTEST_SKIP(); - // ODBC Environment - SQLHENV env; - - SQLINTEGER length; - // Allocate an environment handle - SQLRETURN return_env = SQLAllocEnv(&env); + connect(); - EXPECT_TRUE(return_env == SQL_SUCCESS); + SQLINTEGER length; SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0, &length); EXPECT_TRUE(return_get == SQL_SUCCESS); EXPECT_EQ(length, sizeof(SQLINTEGER)); + + disconnect(); } -TEST(SQLGetEnvAttr, TestSQLGetEnvAttrNullValuePointer) { +TEST_F(FlightSQLODBCTestBase, TestSQLGetEnvAttrNullValuePointer) { + // Test is disabled because call to SQLGetEnvAttr is handled by the driver manager on + // Windows. This test case can be potentionally used on macOS/Linux GTEST_SKIP(); - // ODBC Environment - SQLHENV env; - - // Allocate an environment handle - SQLRETURN return_env = SQLAllocEnv(&env); - - EXPECT_TRUE(return_env == SQL_SUCCESS); + connect(); SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0, nullptr); EXPECT_TRUE(return_get == SQL_ERROR); + + disconnect(); } TEST(SQLSetEnvAttr, TestSQLSetEnvAttrOutputNTSValid) { @@ -293,12 +287,466 @@ TEST(SQLSetEnvAttr, TestSQLSetEnvAttrNullValuePointer) { EXPECT_TRUE(return_env == SQL_SUCCESS); // Attempt to set using bad data pointer - SQLRETURN return_set = - SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0); + SQLRETURN return_set = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0); EXPECT_TRUE(return_set == SQL_ERROR); } +TEST(SQLDriverConnect, TestSQLDriverConnect) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, + arrow::util::UTF8ToWideString(connect_str)); + std::vector connect_str0(wconnect_str.begin(), wconnect_str.end()); + + SQLWCHAR outstr[ODBC_BUFFER_SIZE] = L""; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. 
+ ret = SQLDriverConnect(conn, NULL, &connect_str0[0], + static_cast(connect_str0.size()), outstr, + ODBC_BUFFER_SIZE, &outstrlen, SQL_DRIVER_NOPROMPT); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Check that outstr has same content as connect_str + std::string out_connection_string = ODBC::SqlWcharToString(outstr, outstrlen); + Connection::ConnPropertyMap out_properties; + Connection::ConnPropertyMap in_properties; + ODBC::ODBCConnection::getPropertiesFromConnString(out_connection_string, + out_properties); + ODBC::ODBCConnection::getPropertiesFromConnString(connect_str, in_properties); + EXPECT_TRUE(compareConnPropertyMap(out_properties, in_properties)); + + // Disconnect from ODBC + ret = SQLDisconnect(conn); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLDriverConnect, TestSQLDriverConnectInvalidUid) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + // Append invalid uid to connection string + connect_str += std::string("uid=non_existent_id;"); + + ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, + arrow::util::UTF8ToWideString(connect_str)); + std::vector connect_str0(wconnect_str.begin(), wconnect_str.end()); + + SQLWCHAR outstr[ODBC_BUFFER_SIZE]; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. 
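+ // The invalid uid appended above should cause authentication to fail, so
+ // SQL_ERROR is expected from this call.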
+ ret = SQLDriverConnect(conn, NULL, &connect_str0[0], + static_cast(connect_str0.size()), outstr, + ODBC_BUFFER_SIZE, &outstrlen, SQL_DRIVER_NOPROMPT); + + EXPECT_TRUE(ret == SQL_ERROR); + + // TODO uncomment this check when SQLGetDiagRec is implemented + // VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); + + // TODO: Check that outstr remains empty after SqlWcharToString + // is fixed to handle empty `outstr` + // std::string out_connection_string = ODBC::SqlWcharToString(outstr, outstrlen); + // EXPECT_TRUE(out_connection_string.empty()); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLConnect, TestSQLConnect) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + + // Write connection string content into a DSN, + // must succeed before continuing + std::string uid(""), pwd(""); + ASSERT_TRUE(writeDSN(connect_str)); + + std::string dsn(TEST_DSN); + ASSERT_OK_AND_ASSIGN(std::wstring wdsn, arrow::util::UTF8ToWideString(dsn)); + ASSERT_OK_AND_ASSIGN(std::wstring wuid, arrow::util::UTF8ToWideString(uid)); + ASSERT_OK_AND_ASSIGN(std::wstring wpwd, arrow::util::UTF8ToWideString(pwd)); + std::vector dsn0(wdsn.begin(), wdsn.end()); + std::vector uid0(wuid.begin(), wuid.end()); + std::vector pwd0(wpwd.begin(), wpwd.end()); + + // Connecting to ODBC server. 
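+ // SQLConnect resolves the DSN registered above through the driver manager;
+ // the empty uid and pwd buffers leave authentication to the DSN contents.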
+ ret = SQLConnect(conn, dsn0.data(), static_cast(dsn0.size()), uid0.data(), + static_cast(uid0.size()), pwd0.data(), + static_cast(pwd0.size())); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Remove DSN + EXPECT_TRUE(UnregisterDsn(dsn)); + + // Disconnect from ODBC + ret = SQLDisconnect(conn); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLConnect, TestSQLConnectInputUidPwd) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + + // Retrieve valid uid and pwd + Connection::ConnPropertyMap properties; + ODBC::ODBCConnection::getPropertiesFromConnString(connect_str, properties); + std::string uid_key("uid"); + std::string pwd_key("pwd"); + std::string uid = properties[uid_key]; + std::string pwd = properties[pwd_key]; + + // Write connection string content without uid and pwd into a DSN, + // must succeed before continuing + properties.erase(uid_key); + properties.erase(pwd_key); + ASSERT_TRUE(writeDSN(properties)); + + std::string dsn(TEST_DSN); + ASSERT_OK_AND_ASSIGN(std::wstring wdsn, arrow::util::UTF8ToWideString(dsn)); + ASSERT_OK_AND_ASSIGN(std::wstring wuid, arrow::util::UTF8ToWideString(uid)); + ASSERT_OK_AND_ASSIGN(std::wstring wpwd, arrow::util::UTF8ToWideString(pwd)); + std::vector dsn0(wdsn.begin(), wdsn.end()); + std::vector uid0(wuid.begin(), wuid.end()); + std::vector pwd0(wpwd.begin(), wpwd.end()); + + // Connecting to ODBC server. 
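+ // uid and pwd were stripped from the DSN above, so the credentials passed
+ // here are the ones used to authenticate.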
+ ret = SQLConnect(conn, dsn0.data(), static_cast(dsn0.size()), uid0.data(), + static_cast(uid0.size()), pwd0.data(), + static_cast(pwd0.size())); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Remove DSN + EXPECT_TRUE(UnregisterDsn(dsn)); + + // Disconnect from ODBC + ret = SQLDisconnect(conn); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLConnect, TestSQLConnectInvalidUid) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + + // Retrieve valid uid and pwd + Connection::ConnPropertyMap properties; + ODBC::ODBCConnection::getPropertiesFromConnString(connect_str, properties); + std::string uid = properties[std::string("uid")]; + std::string pwd = properties[std::string("pwd")]; + + // Append invalid uid to connection string + connect_str += std::string("uid=non_existent_id;"); + + // Write connection string content into a DSN, + // must succeed before continuing + ASSERT_TRUE(writeDSN(connect_str)); + + std::string dsn(TEST_DSN); + ASSERT_OK_AND_ASSIGN(std::wstring wdsn, arrow::util::UTF8ToWideString(dsn)); + ASSERT_OK_AND_ASSIGN(std::wstring wuid, arrow::util::UTF8ToWideString(uid)); + ASSERT_OK_AND_ASSIGN(std::wstring wpwd, arrow::util::UTF8ToWideString(pwd)); + std::vector dsn0(wdsn.begin(), wdsn.end()); + std::vector uid0(wuid.begin(), wuid.end()); + std::vector pwd0(wpwd.begin(), wpwd.end()); + + // Connecting to ODBC server. 
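+ // The DSN written above contains the invalid uid, which takes precedence
+ // over the valid uid and pwd arguments, so the connection should fail.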
+ ret = SQLConnect(conn, dsn0.data(), static_cast(dsn0.size()), uid0.data(), + static_cast(uid0.size()), pwd0.data(), + static_cast(pwd0.size())); + + // UID specified in DSN will take precedence, + // so connection still fails despite passing valid uid in SQLConnect call + EXPECT_TRUE(ret == SQL_ERROR); + + // TODO uncomment this check when SQLGetDiagRec is implemented + // VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); + + // Remove DSN + EXPECT_TRUE(UnregisterDsn(dsn)); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLConnect, TestSQLConnectDSNPrecedence) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + + // Write connection string content into a DSN, + // must succeed before continuing + + // Pass incorrect uid and password to SQLConnect, they will be ignored + std::string uid("non_existent_id"), pwd("non_existent_password"); + ASSERT_TRUE(writeDSN(connect_str)); + + std::string dsn(TEST_DSN); + ASSERT_OK_AND_ASSIGN(std::wstring wdsn, arrow::util::UTF8ToWideString(dsn)); + ASSERT_OK_AND_ASSIGN(std::wstring wuid, arrow::util::UTF8ToWideString(uid)); + ASSERT_OK_AND_ASSIGN(std::wstring wpwd, arrow::util::UTF8ToWideString(pwd)); + std::vector dsn0(wdsn.begin(), wdsn.end()); + std::vector uid0(wuid.begin(), wuid.end()); + std::vector pwd0(wpwd.begin(), wpwd.end()); + + // Connecting to ODBC server. 
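+ // The DSN holds valid credentials; the bogus uid and pwd arguments are
+ // ignored because values stored in the DSN take precedence.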
+ ret = SQLConnect(conn, dsn0.data(), static_cast(dsn0.size()), uid0.data(), + static_cast(uid0.size()), pwd0.data(), + static_cast(pwd0.size())); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Remove DSN + EXPECT_TRUE(UnregisterDsn(dsn)); + + // Disconnect from ODBC + ret = SQLDisconnect(conn); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLDisconnect, TestSQLDisconnectWithoutConnection) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Attempt to disconnect without a connection, expect to fail + ret = SQLDisconnect(conn); + + EXPECT_TRUE(ret == SQL_ERROR); + + // Expect ODBC driver manager to return error state + VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("08003")); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} } // namespace integration_tests } // namespace odbc } // namespace flight diff --git a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc new file mode 100644 index 00000000000..d60bcea19e4 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc @@ -0,0 +1,172 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// For DSN registration. 
flight_sql_connection.h needs to be included first due to conflicts +// with windows.h +#include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" + +#include "arrow/flight/sql/odbc/tests/odbc_test_suite.h" + +// For DSN registration +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" + +namespace arrow { +namespace flight { +namespace odbc { +namespace integration_tests { +void FlightSQLODBCTestBase::connect() { + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + std::vector<SQLWCHAR> connect_str0(connect_str.begin(), connect_str.end()); + + SQLWCHAR outstr[ODBC_BUFFER_SIZE]; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. + ret = SQLDriverConnect(conn, NULL, &connect_str0[0], + static_cast<SQLSMALLINT>(connect_str0.size()), outstr, + ODBC_BUFFER_SIZE, &outstrlen, SQL_DRIVER_NOPROMPT); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + // Assert connection is successful before we continue + ASSERT_TRUE(ret == SQL_SUCCESS); +} + +void FlightSQLODBCTestBase::disconnect() { + // Disconnect from ODBC + SQLRETURN ret = SQLDisconnect(conn); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +bool compareConnPropertyMap(Connection::ConnPropertyMap map1, + Connection::ConnPropertyMap map2) { + if (map1.size() != map2.size()) return false; + + for (const auto& [key, value] : map1) { + if (value != map2[key]) return false; + } + + return true; +} + +void VerifyOdbcErrorState(SQLSMALLINT handle_type, SQLHANDLE handle, + std::string expected_state) { + using ODBC::SqlWcharToString; + + SQLWCHAR sql_state[7] = {}; + SQLINTEGER native_code; + + SQLWCHAR message[ODBC_BUFFER_SIZE] = {}; + SQLSMALLINT reallen = 0; + + // On Windows, reallen is in bytes. On Linux, reallen is in chars. + // So, not using reallen + SQLGetDiagRec(handle_type, handle, 1, sql_state, &native_code, message, + ODBC_BUFFER_SIZE, &reallen); + + std::string res = SqlWcharToString(sql_state); + + EXPECT_EQ(res, expected_state); +} + +std::string GetOdbcErrorMessage(SQLSMALLINT handle_type, SQLHANDLE handle) { + using ODBC::SqlWcharToString; + + SQLWCHAR sql_state[7] = {}; + SQLINTEGER native_code; + + SQLWCHAR message[ODBC_BUFFER_SIZE] = {}; + SQLSMALLINT reallen = 0; + + // On Windows, reallen is in bytes. On Linux, reallen is in chars.
+ // So, not using reallen + SQLGetDiagRec(handle_type, handle, 1, sql_state, &native_code, message, + ODBC_BUFFER_SIZE, &reallen); + + std::string res = SqlWcharToString(sql_state); + + if (res.empty() || !message[0]) { + res = "Cannot find ODBC error message"; + } else { + res.append(": ").append(SqlWcharToString(message)); + } + + return res; +} + +bool writeDSN(std::string connection_str) { + Connection::ConnPropertyMap properties; + + ODBC::ODBCConnection::getPropertiesFromConnString(connection_str, properties); + return writeDSN(properties); +} + +bool writeDSN(Connection::ConnPropertyMap properties) { + using driver::flight_sql::FlightSqlConnection; + using driver::flight_sql::config::Configuration; + using driver::odbcabstraction::Connection; + using ODBC::ODBCConnection; + + Configuration config; + config.Set(FlightSqlConnection::DSN, std::string(TEST_DSN)); + + for (const auto& [key, value] : properties) { + config.Set(key, value); + } + + std::string driver = config.Get(FlightSqlConnection::DRIVER); + + return RegisterDsn(config, driver.c_str()); +} +} // namespace integration_tests +} // namespace odbc +} // namespace flight +} // namespace arrow diff --git a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h new file mode 100644 index 00000000000..49ab2e20f44 --- /dev/null +++ b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h @@ -0,0 +1,90 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include "arrow/testing/gtest_util.h" +#include "arrow/util/io_util.h" +#include "arrow/util/utf8.h" + +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h" + +#ifdef _WIN32 +# include +#endif + +#include +#include +#include + +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" + +// For DSN registration +#include "arrow/flight/sql/odbc/flight_sql/system_dsn.h" + +#define TEST_CONNECT_STR "ARROW_FLIGHT_SQL_ODBC_CONN" +#define TEST_DSN "Apache Arrow Flight SQL Test DSN" + +namespace arrow { +namespace flight { +namespace odbc { +namespace integration_tests { +using driver::odbcabstraction::Connection; + +class FlightSQLODBCTestBase : public ::testing::Test { + public: + /// \brief Connect to Arrow Flight SQL server using connection string defined in + /// environment variable "ARROW_FLIGHT_SQL_ODBC_CONN" + void connect(); + /// \brief Disconnect from server + void disconnect(); + + /** ODBC Environment. */ + SQLHENV env; + + /** ODBC Connect. */ + SQLHDBC conn; + + /** ODBC Statement. */ + SQLHSTMT stmt; +}; + +/** ODBC read buffer size. 
*/ +enum { ODBC_BUFFER_SIZE = 1024 }; + +/// Compare two ConnPropertyMaps; keys are case-insensitive +bool compareConnPropertyMap(Connection::ConnPropertyMap map1, + Connection::ConnPropertyMap map2); + +/// Get error message from ODBC driver using SQLGetDiagRec +std::string GetOdbcErrorMessage(SQLSMALLINT handle_type, SQLHANDLE handle); + +/// Verify ODBC Error State +void VerifyOdbcErrorState(SQLSMALLINT handle_type, SQLHANDLE handle, + std::string expected_state); + +/// \brief Write connection string into DSN +/// \param[in] connection_str the connection string. +/// \return true on success +bool writeDSN(std::string connection_str); + +/// \brief Write properties map into DSN +/// \param[in] properties the properties map. +/// \return true on success +bool writeDSN(Connection::ConnPropertyMap properties); +} // namespace integration_tests +} // namespace odbc +} // namespace flight +} // namespace arrow From 9a3b3deeb7ba78a66975912830623706980a95bc Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Thu, 22 May 2025 16:26:21 -0700 Subject: [PATCH 03/63] Add odbc.def and cmd file to rat_exclude --- dev/release/rat_exclude_files.txt | 2 ++ 1 file changed, 2 insertions(+) diff --git a/dev/release/rat_exclude_files.txt b/dev/release/rat_exclude_files.txt index 0c827421fe8..ad6841bf483 100644 --- a/dev/release/rat_exclude_files.txt +++ b/dev/release/rat_exclude_files.txt @@ -13,6 +13,8 @@ ci/vcpkg/*.patch CHANGELOG.md cpp/CHANGELOG_PARQUET.md cpp/src/arrow/c/dlpack_abi.h +cpp/src/arrow/flight/sql/odbc/odbc.def +cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd cpp/src/arrow/io/mman.h cpp/src/arrow/util/random.h cpp/src/arrow/status.cc From cc0f6e929e1ba8ef5d8b0661c892119dbb36cb1c Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Fri, 23 May 2025 16:45:00 -0700 Subject: [PATCH 04/63] Nit - remove duplicate lines Accidentally committed the change during git rebase --- .../include/odbcabstraction/odbc_impl/encoding_utils.h | 3 --- 1 file changed, 3 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h index 23d9ca3e6d4..2d898066583 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h @@ -19,9 +19,6 @@ #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/encoding.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" -#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/encoding.h" -#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" - #include #include #include From 729b89946b3238828f7bce2c57c7a4a43533f3b0 Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Fri, 23 May 2025 16:59:39 -0700 Subject: [PATCH 05/63] Nit - remove usage of nullptr --- .../include/odbcabstraction/odbc_impl/encoding_utils.h | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h index 2d898066583..45ed8713626 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h +++
b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h @@ -83,7 +83,7 @@ inline size_t ConvertToSqlWChar(const std::string_view& str, SQLWCHAR* buffer, /// \param[in] msg_len Number of characters in wchar_msg /// \return wchar_msg in std::string format inline std::string SqlWcharToString(SQLWCHAR* wchar_msg, SQLSMALLINT msg_len = SQL_NTS) { - if (wchar_msg == nullptr) { + if (!wchar_msg) { return std::string(); } From e1b1677dbd8fe0bcdd6dfc064abd81d856e4eed2 Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Thu, 22 May 2025 16:46:48 -0700 Subject: [PATCH 06/63] DSN window implementation --- .../flight/sql/odbc/flight_sql/system_dsn.h | 14 +++++++ .../sql/odbc/flight_sql/win_system_dsn.cc | 18 +++++++- cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 42 +++++++++++++++---- 3 files changed, 65 insertions(+), 9 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h index 535a063269e..f5470693eea 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h @@ -21,6 +21,7 @@ #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" using driver::flight_sql::config::Configuration; +using driver::odbcabstraction::Connection; #if defined _WIN32 || defined _WIN64 /** * Display connection window for user to configure connection parameters. * * @param windowParent Parent window handle. * @param config Output configuration. * @return True on success and false on fail. */ bool DisplayConnectionWindow(void* windowParent, Configuration& config); + +/** + * For SQLDriverConnect. + * Display connection window for user to configure connection parameters. + * + * @param windowParent Parent window handle. + * @param config Output configuration, presumed to be empty; it will be populated using + * values from properties. + * @param properties Output properties. + * @return True on success and false on fail.
+ */ +bool DisplayConnectionWindow(void* windowParent, Configuration& config, + Connection::ConnPropertyMap& properties); #endif /** diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc index a2bf2565610..5c8d116a7bc 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc @@ -41,6 +41,7 @@ using driver::flight_sql::config::ConnectionStringParser; using driver::flight_sql::config::DsnConfigurationWindow; using driver::flight_sql::config::Result; using driver::flight_sql::config::Window; +using driver::odbcabstraction::DriverException; BOOL CALLBACK ConfigDriver(HWND hwndParent, WORD fRequest, LPCSTR lpszDriver, LPCSTR lpszArgs, LPSTR lpszMsg, WORD cbMsgMax, @@ -63,7 +64,7 @@ bool DisplayConnectionWindow(void* windowParent, Configuration& config) { window.Update(); return ProcessMessages(window) == Result::OK; - } catch (const driver::odbcabstraction::DriverException& err) { + } catch (const DriverException& err) { std::stringstream buf; buf << "SQL State: " << err.GetSqlState() << ", Message: " << err.GetMessageText() << ", Code: " << err.GetNativeError(); @@ -76,6 +77,21 @@ bool DisplayConnectionWindow(void* windowParent, Configuration& config) { return false; } +bool DisplayConnectionWindow(void* windowParent, Configuration& config, + Connection::ConnPropertyMap& properties) { + for (const auto& [key, value] : properties) { + config.Set(key, value); + } + + if (DisplayConnectionWindow(windowParent, config)) { + properties = config.GetProperties(); + return true; + } else { + // TODO: log cancelled dialog after logging is enabled. + return false; + } +} + BOOL INSTAPI ConfigDSN(HWND hwndParent, WORD req, LPCSTR driver, LPCSTR attributes) { Configuration config; ConnectionStringParser parser(config); diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index 9d2df40c05a..9368d6b9b30 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -354,6 +354,7 @@ SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, // spec https://github.com/apache/arrow/issues/46560 using driver::odbcabstraction::Connection; + using driver::odbcabstraction::DriverException; using ODBC::ODBCConnection; return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { @@ -369,16 +370,41 @@ SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, // TODO: Implement SQL_DRIVER_COMPLETE_REQUIRED in SQLDriverConnectW according to the // spec https://github.com/apache/arrow/issues/46448 #if defined _WIN32 || defined _WIN64 - if (driverCompletion == SQL_DRIVER_PROMPT || - ((driverCompletion == SQL_DRIVER_COMPLETE || - driverCompletion == SQL_DRIVER_COMPLETE_REQUIRED) && - !missing_properties.empty())) { - // TODO: implement driverCompletion behavior to display connection window. 
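+ // Per the ODBC spec: SQL_DRIVER_PROMPT always shows the configuration window
+ // before connecting; SQL_DRIVER_COMPLETE and SQL_DRIVER_COMPLETE_REQUIRED show
+ // it only after a connect attempt fails with missing attributes; and
+ // SQL_DRIVER_NOPROMPT never shows it.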
+ // Load the DSN window according to driverCompletion + if (driverCompletion == SQL_DRIVER_PROMPT) { + // Load DSN window before first attempt to connect + driver::flight_sql::config::Configuration config; + if (!DisplayConnectionWindow(windowHandle, config, properties)) { + return static_cast(SQL_NO_DATA); + } + connection->connect(dsn, properties, missing_properties); + } else if (driverCompletion == SQL_DRIVER_COMPLETE || + driverCompletion == SQL_DRIVER_COMPLETE_REQUIRED) { + try { + connection->connect(dsn, properties, missing_properties); + } catch (const DriverException&) { + // If first connection fails due to missing attributes, load + // the DSN window and try to connect again + if (!missing_properties.empty()) { + driver::flight_sql::config::Configuration config; + missing_properties.clear(); + + if (!DisplayConnectionWindow(windowHandle, config, properties)) { + return static_cast(SQL_NO_DATA); + } + connection->connect(dsn, properties, missing_properties); + } else { + throw; + } + } + } else { + // Default case: attempt connection without showing DSN window + connection->connect(dsn, properties, missing_properties); } -#endif - +#else + // Attempt connection without loading DSN window on macOS/Linux connection->connect(dsn, properties, missing_properties); - +#endif // Copy connection string to outConnectionString after connection attempt return ODBC::GetStringAttribute(true, connection_string, true, outConnectionString, outConnectionStringBufferLen, outConnectionStringLen, From 928796839809fc26d4ce0175e8a5150d2c87b302 Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Mon, 26 May 2025 16:07:29 -0700 Subject: [PATCH 07/63] Add licenses to `.cmd` and `.def` files --- .../flight/sql/odbc/install/install_amd64.cmd | 17 +++++++++++++++++ cpp/src/arrow/flight/sql/odbc/odbc.def | 17 +++++++++++++++++ dev/release/rat_exclude_files.txt | 2 -- 3 files changed, 34 insertions(+), 2 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd b/cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd index fe365d59b90..b1fd85d578e 100644 --- a/cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd +++ b/cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd @@ -1,3 +1,20 @@ +@REM Licensed to the Apache Software Foundation (ASF) under one +@REM or more contributor license agreements. See the NOTICE file +@REM distributed with this work for additional information +@REM regarding copyright ownership. The ASF licenses this file +@REM to you under the Apache License, Version 2.0 (the +@REM "License"); you may not use this file except in compliance +@REM with the License. You may obtain a copy of the License at +@REM +@REM http://www.apache.org/licenses/LICENSE-2.0 +@REM +@REM Unless required by applicable law or agreed to in writing, +@REM software distributed under the License is distributed on an +@REM "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +@REM KIND, either express or implied. See the License for the +@REM specific language governing permissions and limitations +@REM under the License. + @echo off set ODBC_AMD64=%1 diff --git a/cpp/src/arrow/flight/sql/odbc/odbc.def b/cpp/src/arrow/flight/sql/odbc/odbc.def index dba68425f49..c90c181d7c1 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc.def +++ b/cpp/src/arrow/flight/sql/odbc/odbc.def @@ -1,3 +1,20 @@ +; Licensed to the Apache Software Foundation (ASF) under one +; or more contributor license agreements. 
See the NOTICE file +; distributed with this work for additional information +; regarding copyright ownership. The ASF licenses this file +; to you under the Apache License, Version 2.0 (the +; "License"); you may not use this file except in compliance +; with the License. You may obtain a copy of the License at +; +; http://www.apache.org/licenses/LICENSE-2.0 +; +; Unless required by applicable law or agreed to in writing, +; software distributed under the License is distributed on an +; "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +; KIND, either express or implied. See the License for the +; specific language governing permissions and limitations +; under the License. + LIBRARY arrow_flight_sql_odbc EXPORTS ConfigDSN diff --git a/dev/release/rat_exclude_files.txt b/dev/release/rat_exclude_files.txt index ad6841bf483..0c827421fe8 100644 --- a/dev/release/rat_exclude_files.txt +++ b/dev/release/rat_exclude_files.txt @@ -13,8 +13,6 @@ ci/vcpkg/*.patch CHANGELOG.md cpp/CHANGELOG_PARQUET.md cpp/src/arrow/c/dlpack_abi.h -cpp/src/arrow/flight/sql/odbc/odbc.def -cpp/src/arrow/flight/sql/odbc/install/install_amd64.cmd cpp/src/arrow/io/mman.h cpp/src/arrow/util/random.h cpp/src/arrow/status.cc From 8cefb84c3f3ba6891e541d1395004f9ca3a5c226 Mon Sep 17 00:00:00 2001 From: rscales Date: Tue, 27 May 2025 16:19:21 -0700 Subject: [PATCH 08/63] Implement SQLGetDiagRec --- cpp/src/arrow/flight/sql/odbc/entry_points.cc | 16 ++-- cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 84 ++++++++++++++++++- cpp/src/arrow/flight/sql/odbc/odbc_api.h | 4 + .../flight/sql/odbc/tests/connection_test.cc | 82 +++++++++++++++++- 4 files changed, 172 insertions(+), 14 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/entry_points.cc b/cpp/src/arrow/flight/sql/odbc/entry_points.cc index 0fc55720938..46536b84e9b 100644 --- a/cpp/src/arrow/flight/sql/odbc/entry_points.cc +++ b/cpp/src/arrow/flight/sql/odbc/entry_points.cc @@ -59,6 +59,14 @@ SQLRETURN SQL_API SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, diagInfoPtr, bufferLength, stringLengthPtr); } +SQLRETURN SQL_API SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, + SQLSMALLINT recNumber, SQLWCHAR* sqlState, + SQLINTEGER* nativeErrorPtr, SQLWCHAR* messageText, + SQLSMALLINT bufferLength, SQLSMALLINT* textLengthPtr) { + return arrow::SQLGetDiagRecW(handleType, handle, recNumber, sqlState, nativeErrorPtr, + messageText, bufferLength, textLengthPtr); +} + SQLRETURN SQL_API SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, SQLINTEGER bufferLen, SQLINTEGER* strLenPtr) { return arrow::SQLGetEnvAttr(env, attr, valuePtr, bufferLen, strLenPtr); @@ -81,14 +89,6 @@ SQLRETURN SQL_API SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, return arrow::SQLGetInfoW(conn, infoType, infoValuePtr, bufLen, length); } -SQLRETURN SQL_API SQLGetDiagRecW(SQLSMALLINT type, SQLHANDLE handle, SQLSMALLINT recNum, - SQLWCHAR* sqlState, SQLINTEGER* nativeError, - SQLWCHAR* msgBuffer, SQLSMALLINT msgBufferLen, - SQLSMALLINT* msgLen) { - // TODO implement SQLGetDiagRecW - return SQL_ERROR; -} - SQLRETURN SQL_API SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, SQLWCHAR* inConnectionString, SQLSMALLINT inConnectionStringLen, diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index 9368d6b9b30..bf484510ccf 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -230,13 +230,93 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, stringLengthPtr, 
*diagnostics); } - default: - return SQL_ERROR; + default: { + // TODO Return correct dummy values + return SQL_SUCCESS; + } } return SQL_ERROR; } +SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, + SQLWCHAR* sqlState, SQLINTEGER* nativeErrorPtr, + SQLWCHAR* messageText, SQLSMALLINT bufferLength, + SQLSMALLINT* textLengthPtr) { + using driver::odbcabstraction::Diagnostics; + using ODBC::ConvertToSqlWChar; + using ODBC::GetStringAttribute; + using ODBC::ODBCConnection; + using ODBC::ODBCEnvironment; + + if (!handle) { + return SQL_INVALID_HANDLE; + } + + // Record number must be greater or equal to 1 + if (recNumber < 1 || bufferLength < 0) { + return SQL_ERROR; + } + + // Set character type to be Unicode by default + const bool isUnicode = true; + Diagnostics* diagnostics = nullptr; + + switch (handleType) { + case SQL_HANDLE_ENV: { + auto* environment = ODBCEnvironment::of(handle); + diagnostics = &environment->GetDiagnostics(); + break; + } + + case SQL_HANDLE_DBC: { + auto* connection = ODBCConnection::of(handle); + diagnostics = &connection->GetDiagnostics(); + break; + } + + case SQL_HANDLE_DESC: { + return SQL_ERROR; + } + + case SQL_HANDLE_STMT: { + return SQL_ERROR; + } + + default: + return SQL_INVALID_HANDLE; + } + + if (!diagnostics) { + return SQL_ERROR; + } + + // Convert from ODBC 1 based record number to internal diagnostics 0 indexed storage + const size_t recordIndex = static_cast(recNumber - 1); + if (!diagnostics->HasRecord(recordIndex)) { + return SQL_NO_DATA; + } + + if (sqlState) { + // The length of the sql state is always 5 characters plus null + SQLSMALLINT size = 6; + const std::string& state = diagnostics->GetSQLState(recordIndex); + GetStringAttribute(isUnicode, state, false, sqlState, size, &size, *diagnostics); + } + + if (nativeErrorPtr) { + *nativeErrorPtr = diagnostics->GetNativeError(recordIndex); + } + + if (messageText || textLengthPtr) { + const std::string& message = diagnostics->GetMessageText(recordIndex); + return GetStringAttribute(isUnicode, message, false, messageText, bufferLength, + textLengthPtr, *diagnostics); + } + + return SQL_SUCCESS; +} + SQLRETURN SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, SQLINTEGER bufferLen, SQLINTEGER* strLenPtr) { using driver::odbcabstraction::DriverException; diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.h b/cpp/src/arrow/flight/sql/odbc/odbc_api.h index 14bbddaa3ce..d5c392bcf59 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.h +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.h @@ -35,6 +35,10 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr); +SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, + SQLWCHAR* sqlState, SQLINTEGER* nativeErrorPtr, + SQLWCHAR* messageText, SQLSMALLINT bufferLength, + SQLSMALLINT* textLengthPtr); SQLRETURN SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, SQLINTEGER bufferLen, SQLINTEGER* strLenPtr); SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, diff --git a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc index c6a39ccc361..039a5fd074e 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc @@ -400,8 +400,7 @@ TEST(SQLDriverConnect, 
TestSQLDriverConnectInvalidUid) { EXPECT_TRUE(ret == SQL_ERROR); - // TODO uncomment this check when SQLGetDiagRec is implemented - // VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); + VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); // TODO: Check that outstr remains empty after SqlWcharToString // is fixed to handle empty `outstr` @@ -621,8 +620,7 @@ TEST(SQLConnect, TestSQLConnectInvalidUid) { // so connection still fails despite passing valid uid in SQLConnect call EXPECT_TRUE(ret == SQL_ERROR); - // TODO uncomment this check when SQLGetDiagRec is implemented - // VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); + VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); // Remove DSN EXPECT_TRUE(UnregisterDsn(dsn)); @@ -747,6 +745,82 @@ TEST(SQLDisconnect, TestSQLDisconnectWithoutConnection) { EXPECT_TRUE(ret == SQL_SUCCESS); } + +TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + // Append invalid uid to connection string + connect_str += std::string("uid=non_existent_id;"); + + ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, + arrow::util::UTF8ToWideString(connect_str)); + std::vector connect_str0(wconnect_str.begin(), wconnect_str.end()); + + SQLWCHAR outstr[ODBC_BUFFER_SIZE]; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. + ret = SQLDriverConnect(conn, NULL, &connect_str0[0], + static_cast(connect_str0.size()), outstr, + ODBC_BUFFER_SIZE, &outstrlen, SQL_DRIVER_NOPROMPT); + + EXPECT_TRUE(ret == SQL_ERROR); + + if (ret != SQL_SUCCESS) { + std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; + } + + SQLWCHAR sql_state[6]; + SQLINTEGER native_error; + SQLWCHAR message[ODBC_BUFFER_SIZE]; + SQLSMALLINT message_length; + + ret = SQLGetDiagRec(SQL_HANDLE_DBC, conn, 1, sql_state, &native_error, message, + ODBC_BUFFER_SIZE, &message_length); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + EXPECT_TRUE(message_length > 200); + + EXPECT_TRUE(native_error == 200); + + // 28000 + EXPECT_TRUE(sql_state[0] == '2'); + EXPECT_TRUE(sql_state[1] == '8'); + EXPECT_TRUE(sql_state[2] == '0'); + EXPECT_TRUE(sql_state[3] == '0'); + EXPECT_TRUE(sql_state[4] == '0'); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + } // namespace integration_tests } // namespace odbc } // namespace flight From 50888acad64bc63087b9924ac9e4b6a15f70f6bc Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Mon, 26 May 2025 16:20:18 -0700 Subject: [PATCH 09/63] Build ODBC in Windows Workflow Tests are skipped for now. 
--- ci/scripts/cpp_build.sh | 2 ++ ci/scripts/cpp_test.sh | 2 ++ cpp/cmake_modules/ThirdpartyToolchain.cmake | 2 +- cpp/src/arrow/flight/sql/odbc/CMakeLists.txt | 5 +++++ cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt | 8 +++++--- .../flight_sql/accessors/string_array_accessor_test.cc | 1 - cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h | 1 + .../arrow/flight/sql/odbc/flight_sql/system_trust_store.h | 3 +++ .../arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc | 5 +++-- .../arrow/flight/sql/odbc/odbcabstraction/CMakeLists.txt | 3 --- 10 files changed, 22 insertions(+), 10 deletions(-) diff --git a/ci/scripts/cpp_build.sh b/ci/scripts/cpp_build.sh index ec697303d86..2d0a0f89975 100755 --- a/ci/scripts/cpp_build.sh +++ b/ci/scripts/cpp_build.sh @@ -64,6 +64,7 @@ if [ "${ARROW_ENABLE_THREADING:-ON}" = "OFF" ]; then ARROW_AZURE=OFF ARROW_FLIGHT=OFF ARROW_FLIGHT_SQL=OFF + ARROW_FLIGHT_SQL_ODBC=OFF ARROW_GCS=OFF ARROW_JEMALLOC=OFF ARROW_MIMALLOC=OFF @@ -206,6 +207,7 @@ else -DARROW_FILESYSTEM=${ARROW_FILESYSTEM:-ON} \ -DARROW_FLIGHT=${ARROW_FLIGHT:-OFF} \ -DARROW_FLIGHT_SQL=${ARROW_FLIGHT_SQL:-OFF} \ + -DARROW_FLIGHT_SQL_ODBC=${ARROW_FLIGHT_SQL_ODBC:-OFF} \ -DARROW_FUZZING=${ARROW_FUZZING:-OFF} \ -DARROW_GANDIVA_PC_CXX_FLAGS=${ARROW_GANDIVA_PC_CXX_FLAGS:-} \ -DARROW_GANDIVA=${ARROW_GANDIVA:-OFF} \ diff --git a/ci/scripts/cpp_test.sh b/ci/scripts/cpp_test.sh index e646ba964a5..05885ce4018 100755 --- a/ci/scripts/cpp_test.sh +++ b/ci/scripts/cpp_test.sh @@ -73,6 +73,8 @@ case "$(uname)" in exclude_tests="${exclude_tests}|gandiva-precompiled-test" exclude_tests="${exclude_tests}|gandiva-projector-test" exclude_tests="${exclude_tests}|gandiva-utf8-test" + # TODO: Enable ODBC tests + exclude_tests="${exclude_tests}|arrow-connection-test" ctest_options+=(--exclude-regex "${exclude_tests}") ;; *) diff --git a/cpp/cmake_modules/ThirdpartyToolchain.cmake b/cpp/cmake_modules/ThirdpartyToolchain.cmake index 29dde407169..00263849494 100644 --- a/cpp/cmake_modules/ThirdpartyToolchain.cmake +++ b/cpp/cmake_modules/ThirdpartyToolchain.cmake @@ -1283,7 +1283,7 @@ if(ARROW_USE_BOOST) endif() if(ARROW_BOOST_REQUIRE_LIBRARY) set(ARROW_BOOST_COMPONENTS filesystem system) - if(ARROW_FLIGHT_SQL_ODBC AND MSVC) + if(ARROW_FLIGHT_SQL_ODBC) list(APPEND ARROW_BOOST_COMPONENTS locale) endif() set(ARROW_BOOST_OPTIONAL_COMPONENTS process) diff --git a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt index 1f7c5d1f61f..449db0fedf4 100644 --- a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt @@ -17,9 +17,14 @@ add_custom_target(arrow_flight_sql_odbc) +# Ensure fmt is loaded as header only +add_compile_definitions(FMT_HEADER_ONLY) + if(WIN32) if(MSVC_VERSION GREATER_EQUAL 1900) set(ODBCINST legacy_stdio_definitions odbccp32 shlwapi) + elseif(MINGW) + set(ODBCINST odbccp32 shlwapi) endif() elseif(APPLE) set(ODBCINST iodbcinst) diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt index 6985f781b9a..e9f282e91f9 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt @@ -101,10 +101,12 @@ if(WIN32) win_system_dsn.cc) endif() -target_link_libraries(arrow_odbc_spi_impl PUBLIC odbcabstraction arrow_flight_sql_shared) +target_link_libraries(arrow_odbc_spi_impl PUBLIC odbcabstraction arrow_flight_sql_shared + Boost::locale) -if(MSVC) - target_link_libraries(arrow_odbc_spi_impl 
PUBLIC Boost::locale) +# Link libraries on MINGW64 only +if(MINGW AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + target_link_libraries(arrow_odbc_spi_impl PUBLIC ${ODBCINST}) endif() set_target_properties(arrow_odbc_spi_impl diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/string_array_accessor_test.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/string_array_accessor_test.cc index 8b568bbffcf..587e7d5eb1c 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/string_array_accessor_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/accessors/string_array_accessor_test.cc @@ -134,7 +134,6 @@ TEST(StringArrayAccessor, Test_CDataType_WCHAR_Truncation) { ColumnBinding binding(odbcabstraction::CDataType_WCHAR, 0, 0, buffer.data(), max_strlen, strlen_buffer.data()); - std::basic_stringstream ss; int64_t value_offset = 0; // Construct the whole string by concatenating smaller chunks from diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h index 312d5689a98..2609abd6af7 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h @@ -21,6 +21,7 @@ #include #include +#include #if !_WIN32 # include #endif diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h index 71175b09709..f8e02fea526 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h @@ -24,6 +24,9 @@ # include # include + +# include + # include # include diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc index e79e1221e78..8f660a21329 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc @@ -23,6 +23,7 @@ #include #include +#include #include #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" @@ -53,7 +54,7 @@ LRESULT CALLBACK CustomWindow::WndProc(HWND hwnd, UINT msg, WPARAM wParam, switch (msg) { case WM_NCCREATE: { - _ASSERT(lParam != NULL); + assert(lParam != NULL); CREATESTRUCT* createStruct = reinterpret_cast(lParam); @@ -65,7 +66,7 @@ LRESULT CALLBACK CustomWindow::WndProc(HWND hwnd, UINT msg, WPARAM wParam, } case WM_CREATE: { - _ASSERT(window != NULL); + assert(window != NULL); window->SetHandle(hwnd); diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/CMakeLists.txt index c9614b88a5b..e1e52492648 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/CMakeLists.txt @@ -17,9 +17,6 @@ include_directories(include) -# Ensure fmt is loaded as header only -add_compile_definitions(FMT_HEADER_ONLY) - add_library(odbcabstraction include/odbcabstraction/calendar_utils.h include/odbcabstraction/diagnostics.h From 3ca8dd3f3398d7c8b386ce0fd9d86f3a956d456a Mon Sep 17 00:00:00 2001 From: rscales Date: Wed, 4 Jun 2025 13:17:35 -0700 Subject: [PATCH 10/63] Updates for SQLGetDiagFieldW --- cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 180 +++++++++++++--- .../odbc_impl/attribute_utils.h | 10 +- .../flight/sql/odbc/tests/connection_test.cc | 201 +++++++++++++++++- 3 files changed, 347 insertions(+), 44 deletions(-) diff --git 
a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index bf484510ccf..5f54a5d714a 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -135,10 +135,26 @@ SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) { return SQL_ERROR; } +inline bool IsValidStringFieldArgs(SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, + SQLSMALLINT* stringLengthPtr, bool isUnicode) { + const SQLSMALLINT charSize = isUnicode ? GetSqlWCharSize() : sizeof(char); + const bool hasValidBuffer = + diagInfoPtr && bufferLength >= 0 && bufferLength % charSize == 0; + + // regardless of capacity return false if invalid + if (diagInfoPtr && !hasValidBuffer) { + return false; + } + + return hasValidBuffer || stringLengthPtr; +} + SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr) { + // TODO: Implement additional fields types + // https://github.com/apache/arrow/issues/46573 using driver::odbcabstraction::Diagnostics; using ODBC::GetStringAttribute; using ODBC::ODBCConnection; @@ -148,12 +164,18 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, return SQL_INVALID_HANDLE; } - if (!diagInfoPtr) { + if (!diagInfoPtr && !stringLengthPtr) { return SQL_ERROR; } - // Set character type to be Unicode by defualt (not Ansi) - bool isUnicode = true; + // If buffer length derived from null terminated string + if (diagInfoPtr && bufferLength == SQL_NTS) { + const wchar_t* str = reinterpret_cast(diagInfoPtr); + bufferLength = wcslen(str) * driver::odbcabstraction::GetSqlWCharSize(); + } + + // Set character type to be Unicode by default + const bool isUnicode = true; Diagnostics* diagnostics = nullptr; switch (handleType) { @@ -169,6 +191,14 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, break; } + case SQL_HANDLE_DESC: { + return SQL_ERROR; + } + + case SQL_HANDLE_STMT: { + return SQL_ERROR; + } + default: return SQL_ERROR; } @@ -177,32 +207,46 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, return SQL_ERROR; } - // Retrieve header level diagnostics if Record 0 specified - if (recNumber == 0) { - switch (diagIdentifier) { - case SQL_DIAG_NUMBER: { - SQLINTEGER count = static_cast(diagnostics->GetRecordCount()); - *static_cast(diagInfoPtr) = count; - if (stringLengthPtr) { - *stringLengthPtr = sizeof(SQLINTEGER); - } + // Retrieve and return if header level diagnostics + switch (diagIdentifier) { + case SQL_DIAG_NUMBER: { + if (diagInfoPtr) { + *static_cast(diagInfoPtr) = + static_cast(diagnostics->GetRecordCount()); + } - return SQL_SUCCESS; + if (stringLengthPtr) { + *stringLengthPtr = sizeof(SQLINTEGER); } - case SQL_DIAG_SERVER_NAME: { - const std::string source = diagnostics->GetDataSourceComponent(); - return GetStringAttribute(isUnicode, source, false, diagInfoPtr, bufferLength, - stringLengthPtr, *diagnostics); + return SQL_SUCCESS; + } + + // TODO implement return code function + case SQL_DIAG_RETURNCODE: { + return SQL_SUCCESS; + } + + // TODO Implement statement header functions + case SQL_DIAG_CURSOR_ROW_COUNT: + case SQL_DIAG_DYNAMIC_FUNCTION: + case SQL_DIAG_DYNAMIC_FUNCTION_CODE: + case SQL_DIAG_ROW_COUNT: { + if (handleType == SQL_HANDLE_STMT) { + return SQL_SUCCESS; } - default: - return SQL_ERROR; + return SQL_ERROR; } } + // If not a diagnostic header field then the record number must be 1 or greater + if 
(recNumber < 1) { + return SQL_ERROR; + } + // Retrieve record level diagnostics from specified 1 based record - uint32_t recordIndex = static_cast(recNumber - 1); + const uint32_t recordIndex = static_cast(recNumber - 1); if (!diagnostics->HasRecord(recordIndex)) { return SQL_NO_DATA; } @@ -210,13 +254,20 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, // Retrieve record field data switch (diagIdentifier) { case SQL_DIAG_MESSAGE_TEXT: { - const std::string message = diagnostics->GetMessageText(recordIndex); - return GetStringAttribute(isUnicode, message, false, diagInfoPtr, bufferLength, - stringLengthPtr, *diagnostics); + if (IsValidStringFieldArgs(diagInfoPtr, bufferLength, stringLengthPtr, isUnicode)) { + const std::string& message = diagnostics->GetMessageText(recordIndex); + return GetStringAttribute(isUnicode, message, true, diagInfoPtr, bufferLength, + stringLengthPtr, *diagnostics); + } + + return SQL_ERROR; } case SQL_DIAG_NATIVE: { - *static_cast(diagInfoPtr) = diagnostics->GetNativeError(recordIndex); + if (diagInfoPtr) { + *static_cast(diagInfoPtr) = diagnostics->GetNativeError(recordIndex); + } + if (stringLengthPtr) { *stringLengthPtr = sizeof(SQLINTEGER); } @@ -224,16 +275,85 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, return SQL_SUCCESS; } + case SQL_DIAG_SERVER_NAME: { + if (IsValidStringFieldArgs(diagInfoPtr, bufferLength, stringLengthPtr, isUnicode)) { + switch (handleType) { + case SQL_HANDLE_DBC: { + ODBCConnection* connection = reinterpret_cast(handle); + std::string dsn = connection->GetDSN(); + return GetStringAttribute(isUnicode, dsn, true, diagInfoPtr, bufferLength, + stringLengthPtr, *diagnostics); + } + + case SQL_HANDLE_DESC: { + // TODO Implement for case of descriptor + return SQL_ERROR; + } + + case SQL_HANDLE_STMT: { + // TODO Implement for case of statement + return SQL_ERROR; + } + + default: + return SQL_ERROR; + } + } + + return SQL_ERROR; + } + case SQL_DIAG_SQLSTATE: { - const std::string state = diagnostics->GetSQLState(recordIndex); - return GetStringAttribute(isUnicode, state, false, diagInfoPtr, bufferLength, - stringLengthPtr, *diagnostics); + if (IsValidStringFieldArgs(diagInfoPtr, bufferLength, stringLengthPtr, isUnicode)) { + const std::string& state = diagnostics->GetSQLState(recordIndex); + return GetStringAttribute(isUnicode, state, true, diagInfoPtr, bufferLength, + stringLengthPtr, *diagnostics); + } + + return SQL_ERROR; } - default: { - // TODO Return correct dummy values + // Return valid dummy variable for unimplemented field + case SQL_DIAG_COLUMN_NUMBER: { + if (diagInfoPtr) { + *static_cast(diagInfoPtr) = SQL_NO_COLUMN_NUMBER; + } + + if (stringLengthPtr) { + *stringLengthPtr = sizeof(SQLINTEGER); + } + + return SQL_SUCCESS; + } + + // Return empty string dummy variable for unimplemented fields + case SQL_DIAG_CLASS_ORIGIN: + case SQL_DIAG_CONNECTION_NAME: + case SQL_DIAG_SUBCLASS_ORIGIN: { + if (IsValidStringFieldArgs(diagInfoPtr, bufferLength, stringLengthPtr, isUnicode)) { + return GetStringAttribute(isUnicode, "", true, diagInfoPtr, bufferLength, + stringLengthPtr, *diagnostics); + } + + return SQL_ERROR; + } + + // Return valid dummy variable for unimplemented field + case SQL_DIAG_ROW_NUMBER: { + if (diagInfoPtr) { + *static_cast(diagInfoPtr) = SQL_NO_ROW_NUMBER; + } + + if (stringLengthPtr) { + *stringLengthPtr = sizeof(SQLLEN); + } + return SQL_SUCCESS; } + + default: { + return SQL_ERROR; + } } return SQL_ERROR; @@ -486,7 +606,7 @@ SQLRETURN SQLDriverConnectW(SQLHDBC 
conn, SQLHWND windowHandle, connection->connect(dsn, properties, missing_properties); #endif // Copy connection string to outConnectionString after connection attempt - return ODBC::GetStringAttribute(true, connection_string, true, outConnectionString, + return ODBC::GetStringAttribute(true, connection_string, false, outConnectionString, outConnectionStringBufferLen, outConnectionStringLen, connection->GetDiagnostics()); }); diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h index 7cf52b6cc1d..d194ace237f 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h @@ -79,16 +79,20 @@ template inline SQLRETURN GetAttributeSQLWCHAR(const std::string_view& attributeValue, bool isLengthInBytes, SQLPOINTER output, O outputSize, O* outputLenPtr) { - size_t result = + size_t length = ConvertToSqlWChar(attributeValue, reinterpret_cast(output), isLengthInBytes ? outputSize : outputSize * GetSqlWCharSize()); + if (!isLengthInBytes) { + length = length / GetSqlWCharSize(); + } + if (outputLenPtr) { - *outputLenPtr = static_cast(isLengthInBytes ? result : result / GetSqlWCharSize()); + *outputLenPtr = static_cast(length); } if (output && - outputSize < static_cast(result + (isLengthInBytes ? GetSqlWCharSize() : 1))) { + outputSize < static_cast(length + (isLengthInBytes ? GetSqlWCharSize() : 1))) { return SQL_SUCCESS_WITH_INFO; } return SQL_SUCCESS; diff --git a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc index 039a5fd074e..b2866b3e5a7 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc @@ -746,7 +746,7 @@ TEST(SQLDisconnect, TestSQLDisconnectWithoutConnection) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { +TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailure) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -785,9 +785,188 @@ TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { EXPECT_TRUE(ret == SQL_ERROR); - if (ret != SQL_SUCCESS) { - std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; - } + // Retrieve all supported header level and record level data + SQLSMALLINT HEADER_LEVEL = 0; + SQLSMALLINT RECORD_1 = 1; + + // SQL_DIAG_NUMBER + SQLINTEGER diag_number; + SQLSMALLINT diag_number_length; + + ret = SQLGetDiagField(SQL_HANDLE_DBC, conn, HEADER_LEVEL, SQL_DIAG_NUMBER, &diag_number, + sizeof(SQLINTEGER), &diag_number_length); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + EXPECT_EQ(diag_number, 1); + + // SQL_DIAG_SERVER_NAME + SQLWCHAR server_name[ODBC_BUFFER_SIZE]; + SQLSMALLINT server_name_length; + + ret = SQLGetDiagField(SQL_HANDLE_DBC, conn, RECORD_1, SQL_DIAG_SERVER_NAME, server_name, + ODBC_BUFFER_SIZE, &server_name_length); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // SQL_DIAG_MESSAGE_TEXT + SQLWCHAR message_text[ODBC_BUFFER_SIZE]; + SQLSMALLINT message_text_length; + + ret = SQLGetDiagField(SQL_HANDLE_DBC, conn, RECORD_1, SQL_DIAG_MESSAGE_TEXT, + message_text, ODBC_BUFFER_SIZE, &message_text_length); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + EXPECT_GT(message_text_length, 100); + + // SQL_DIAG_NATIVE + SQLINTEGER diag_native; + SQLSMALLINT diag_native_length; + + 
ret = SQLGetDiagField(SQL_HANDLE_DBC, conn, RECORD_1, SQL_DIAG_NATIVE, &diag_native, + sizeof(diag_native), &diag_native_length); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + EXPECT_EQ(diag_native, 200); + + // SQL_DIAG_SQLSTATE + const SQLSMALLINT sql_state_size = 6; + SQLWCHAR sql_state[sql_state_size]; + SQLSMALLINT sql_state_length; + ret = SQLGetDiagField(SQL_HANDLE_DBC, conn, RECORD_1, SQL_DIAG_SQLSTATE, sql_state, + sql_state_size * driver::odbcabstraction::GetSqlWCharSize(), + &sql_state_length); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // 28000 + EXPECT_EQ(sql_state[0], '2'); + EXPECT_EQ(sql_state[1], '8'); + EXPECT_EQ(sql_state[2], '0'); + EXPECT_EQ(sql_state[3], '0'); + EXPECT_EQ(sql_state[4], '0'); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailureNTS) { + // Test is disabled because driver manager on Windows does not pass through SQL_NTS + // This test case can be potentionally used on macOS/Linux + GTEST_SKIP(); + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + // Append invalid uid to connection string + connect_str += std::string("uid=non_existent_id;"); + + ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, + arrow::util::UTF8ToWideString(connect_str)); + std::vector connect_str0(wconnect_str.begin(), wconnect_str.end()); + + SQLWCHAR outstr[ODBC_BUFFER_SIZE]; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. 
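// What this test exercises: when the caller passes SQL_NTS as bufferLength,
// SQLGetDiagFieldW (implemented earlier in this patch) derives the byte
// length from the buffer's existing null-terminated contents, roughly:
//
//   if (diagInfoPtr && bufferLength == SQL_NTS) {
//     bufferLength = wcslen(reinterpret_cast<const wchar_t*>(diagInfoPtr)) *
//                    driver::odbcabstraction::GetSqlWCharSize();
//   }
//
// That is why the test writes a terminator into message_text before the
// SQLGetDiagField call below.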
+ ret = SQLDriverConnect(conn, NULL, &connect_str0[0], + static_cast(connect_str0.size()), outstr, + ODBC_BUFFER_SIZE, &outstrlen, SQL_DRIVER_NOPROMPT); + + EXPECT_TRUE(ret == SQL_ERROR); + + // Retrieve all supported header level and record level data + SQLSMALLINT HEADER_LEVEL = 0; + SQLSMALLINT RECORD_1 = 1; + + // SQL_DIAG_MESSAGE_TEXT SQL_NTS + SQLWCHAR message_text[ODBC_BUFFER_SIZE]; + SQLSMALLINT message_text_length; + + message_text[ODBC_BUFFER_SIZE - 1] = '\0'; + + ret = SQLGetDiagField(SQL_HANDLE_DBC, conn, RECORD_1, SQL_DIAG_MESSAGE_TEXT, + message_text, SQL_NTS, &message_text_length); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + EXPECT_GT(message_text_length, 100); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + +TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + ASSERT_OK_AND_ASSIGN(std::string connect_str, + arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + // Append invalid uid to connection string + connect_str += std::string("uid=non_existent_id;"); + + ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, + arrow::util::UTF8ToWideString(connect_str)); + std::vector connect_str0(wconnect_str.begin(), wconnect_str.end()); + + SQLWCHAR outstr[ODBC_BUFFER_SIZE]; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. 
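// For reference, a consumer would normally drain every diagnostic record by
// looping until SQL_NO_DATA (a sketch, not part of this test):
//
//   for (SQLSMALLINT rec = 1;; ++rec) {
//     SQLRETURN r = SQLGetDiagRec(SQL_HANDLE_DBC, conn, rec, sql_state,
//                                 &native_error, message, ODBC_BUFFER_SIZE,
//                                 &message_length);
//     if (r == SQL_NO_DATA) break;
//   }
//
// This test reads record 1 only, since the failed connect posts a single
// record (see the SQL_DIAG_NUMBER check in the test above).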
+ ret = SQLDriverConnect(conn, NULL, &connect_str0[0], + static_cast(connect_str0.size()), outstr, + ODBC_BUFFER_SIZE, &outstrlen, SQL_DRIVER_NOPROMPT); + + EXPECT_TRUE(ret == SQL_ERROR); SQLWCHAR sql_state[6]; SQLINTEGER native_error; @@ -799,16 +978,16 @@ TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { EXPECT_TRUE(ret == SQL_SUCCESS); - EXPECT_TRUE(message_length > 200); + EXPECT_GT(message_length, 200); - EXPECT_TRUE(native_error == 200); + EXPECT_EQ(native_error, 200); // 28000 - EXPECT_TRUE(sql_state[0] == '2'); - EXPECT_TRUE(sql_state[1] == '8'); - EXPECT_TRUE(sql_state[2] == '0'); - EXPECT_TRUE(sql_state[3] == '0'); - EXPECT_TRUE(sql_state[4] == '0'); + EXPECT_EQ(sql_state[0], '2'); + EXPECT_EQ(sql_state[1], '8'); + EXPECT_EQ(sql_state[2], '0'); + EXPECT_EQ(sql_state[3], '0'); + EXPECT_EQ(sql_state[4], '0'); // Free connection handle ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); From 00aa3d5d5079aabc848c9f0101aa7680c3d16323 Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Fri, 30 May 2025 11:41:18 -0700 Subject: [PATCH 11/63] Enable Driver Logging Add todo to update logging system later Add logs --- .../flight/sql/odbc/flight_sql/address_info.h | 3 +- .../sql/odbc/flight_sql/flight_sql_driver.cc | 15 ++++- .../sql/odbc/flight_sql/win_system_dsn.cc | 3 +- cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 67 ++++++++++++++++--- .../include/odbcabstraction/logger.h | 5 +- .../include/odbcabstraction/utils.h | 3 +- .../flight/sql/odbc/odbcabstraction/utils.cc | 5 +- 7 files changed, 82 insertions(+), 19 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h index 2609abd6af7..91f5a7175d7 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/address_info.h @@ -19,7 +19,8 @@ #include -#include +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" + #include #include #if !_WIN32 diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc index 61a11252380..cb0e5c5ae5c 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc @@ -20,9 +20,11 @@ #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spd_logger.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h" +#include "arrow/util/io_util.h" #define DEFAULT_MAXIMUM_FILE_SIZE 16777216 #define CONFIG_FILE_NAME "arrow-odbc.ini" +#define CONFIG_FILE_PATH "CONFIG_FILE_PATH" namespace driver { namespace flight_sql { @@ -52,7 +54,9 @@ LogLevel ToLogLevel(int64_t level) { } // namespace FlightSqlDriver::FlightSqlDriver() - : diagnostics_("Apache Arrow", "Flight SQL", OdbcVersion::V_3), version_("0.9.0.0") {} + : diagnostics_("Apache Arrow", "Flight SQL", OdbcVersion::V_3), version_("0.9.0.0") { + RegisterLog(); +} std::shared_ptr FlightSqlDriver::CreateConnection(OdbcVersion odbc_version) { return std::make_shared(odbc_version, version_); @@ -63,14 +67,19 @@ odbcabstraction::Diagnostics& FlightSqlDriver::GetDiagnostics() { return diagnos void FlightSqlDriver::SetVersion(std::string version) { version_ = std::move(version); } void FlightSqlDriver::RegisterLog() { + std::string config_path = arrow::internal::GetEnvVar(CONFIG_FILE_PATH).ValueOr(""); + if (config_path.empty()) { + 
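// Logging is opt-in: without CONFIG_FILE_PATH there is no config file
// location to read, so skip log registration instead of probing the
// module directory as the previous ReadConfigFile overload did.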
return; + } + odbcabstraction::PropertyMap propertyMap; - driver::odbcabstraction::ReadConfigFile(propertyMap, CONFIG_FILE_NAME); + driver::odbcabstraction::ReadConfigFile(propertyMap, config_path, CONFIG_FILE_NAME); auto log_enable_iterator = propertyMap.find(std::string(SPDLogger::LOG_ENABLED)); auto log_enabled = log_enable_iterator != propertyMap.end() ? odbcabstraction::AsBool(log_enable_iterator->second) : false; - if (!log_enabled) { + if (!log_enabled.get()) { return; } diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc index 5c8d116a7bc..18b5c399c2c 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc @@ -29,6 +29,7 @@ #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/window.h" #include "arrow/flight/sql/odbc/flight_sql/system_dsn.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h" #include #include @@ -87,7 +88,7 @@ bool DisplayConnectionWindow(void* windowParent, Configuration& config, properties = config.GetProperties(); return true; } else { - // TODO: log cancelled dialog after logging is enabled. + LOG_INFO("Dialog is cancelled by user"); return false; } } diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index 5f54a5d714a..b3deb11bbb7 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -18,15 +18,15 @@ // flight_sql_connection.h needs to be included first due to conflicts with windows.h #include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" -#include -#include -#include -#include -#include -#include - #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/flight_sql_driver.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/diagnostics.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_environment.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h" #if defined _WIN32 || defined _WIN64 // For displaying DSN Window @@ -35,10 +35,13 @@ // odbc_api includes windows.h, which needs to be put behind winsock2.h. 
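// (On Windows, letting windows.h come first pulls in the legacy winsock.h,
// whose definitions clash with winsock2.h.)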
// odbc_environment.h includes winsock2.h -#include +#include "arrow/flight/sql/odbc/odbc_api.h" namespace arrow { SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result) { + LOG_DEBUG("SQLAllocHandle called with type: {}, parent: {}, result: {}", type, parent, + fmt::ptr(result)); + *result = nullptr; switch (type) { @@ -93,6 +96,8 @@ SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result) } SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) { + LOG_DEBUG("SQLFreeHandle called with type: {}, handle: {}", type, handle); + switch (type) { case SQL_HANDLE_ENV: { using ODBC::ODBCEnvironment; @@ -160,6 +165,12 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, using ODBC::ODBCConnection; using ODBC::ODBCEnvironment; + LOG_DEBUG( + "SQLGetDiagFieldW called with handleType: {}, handle: {}, recNumber: {}, " + "diagIdentifier: {}, diagInfoPtr: {}, bufferLength: {}, stringLengthPtr: {}", + handleType, handle, recNumber, diagIdentifier, diagInfoPtr, bufferLength, + fmt::ptr(stringLengthPtr)); + if (!handle) { return SQL_INVALID_HANDLE; } @@ -369,6 +380,13 @@ SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT r using ODBC::ODBCConnection; using ODBC::ODBCEnvironment; + LOG_DEBUG( + "SQLGetDiagRecW called with handleType: {}, handle: {}, recNumber: {}, " + "sqlState: {}, nativeErrorPtr: {}, messageText: {}, bufferLength: {}, " + "textLengthPtr: {}", + handleType, handle, recNumber, fmt::ptr(sqlState), fmt::ptr(nativeErrorPtr), + fmt::ptr(messageText), bufferLength, fmt::ptr(textLengthPtr)); + if (!handle) { return SQL_INVALID_HANDLE; } @@ -438,10 +456,15 @@ SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT r } SQLRETURN SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, - SQLINTEGER bufferLen, SQLINTEGER* strLenPtr) { + SQLINTEGER bufferLength, SQLINTEGER* strLenPtr) { using driver::odbcabstraction::DriverException; using ODBC::ODBCEnvironment; + LOG_DEBUG( + "SQLGetEnvAttr called with env: {}, attr: {}, valuePtr: {}, " + "bufferLength: {}, strLenPtr: {}", + env, attr, valuePtr, bufferLength, fmt::ptr(strLenPtr)); + ODBCEnvironment* environment = reinterpret_cast(env); return ODBCEnvironment::ExecuteWithDiagnostics(environment, SQL_ERROR, [=]() { @@ -498,6 +521,11 @@ SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, using driver::odbcabstraction::DriverException; using ODBC::ODBCEnvironment; + LOG_DEBUG( + "SQLSetEnvAttr called with env: {}, attr: {}, valuePtr: {}, " + "strLen: {}", + env, attr, valuePtr, strLen); + ODBCEnvironment* environment = reinterpret_cast(env); return ODBCEnvironment::ExecuteWithDiagnostics(environment, SQL_ERROR, [=]() { @@ -557,6 +585,14 @@ SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, using driver::odbcabstraction::DriverException; using ODBC::ODBCConnection; + LOG_DEBUG( + "SQLDriverConnectW called with conn: {}, windowHandle: {}, inConnectionString: {}, " + "inConnectionStringLen: {}, outConnectionString: {}, outConnectionStringBufferLen: " + "{}, outConnectionStringLen: {}, driverCompletion: {}", + conn, fmt::ptr(windowHandle), fmt::ptr(inConnectionString), inConnectionStringLen, + fmt::ptr(outConnectionString), outConnectionStringBufferLen, + fmt::ptr(outConnectionStringLen), driverCompletion); + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { ODBCConnection* connection = reinterpret_cast(conn); std::string connection_string = @@ -621,6 +657,12 @@ SQLRETURN 
SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, using ODBC::SqlWcharToString; + LOG_DEBUG( + "SQLConnectW called with conn: {}, dsnName: {}, dsnNameLen: {}, userName: {}, " + "userNameLen: {}, password: {}, passwordLen: {}", + conn, fmt::ptr(dsnName), dsnNameLen, fmt::ptr(userName), userNameLen, + fmt::ptr(password), passwordLen); + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { ODBCConnection* connection = reinterpret_cast(conn); std::string dsn = SqlWcharToString(dsnName, dsnNameLen); @@ -649,6 +691,8 @@ SQLRETURN SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, SQLRETURN SQLDisconnect(SQLHDBC conn) { using ODBC::ODBCConnection; + LOG_DEBUG("SQLDisconnect called with conn: {}", conn); + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { ODBCConnection* connection = reinterpret_cast(conn); @@ -663,6 +707,11 @@ SQLRETURN SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValueP // TODO: complete implementation of SQLGetInfoW and write tests using ODBC::ODBCConnection; + LOG_DEBUG( + "SQLGetInfoW called with conn: {}, infoType: {}, infoValuePtr: {}, bufLen: {}, " + "length: {}", + conn, infoType, infoValuePtr, bufLen, fmt::ptr(length)); + return ODBCConnection::ExecuteWithDiagnostics(conn, SQL_ERROR, [=]() { ODBCConnection* connection = reinterpret_cast(conn); diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h index 5f8619cbb92..4ea3261cbed 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h @@ -20,7 +20,10 @@ #include #include -#include +#include + +// The logger using spdlog is deprecated and will be replaced. +// TODO: mirgate logging to use Arrow's internal logging system #define __LAZY_LOG(LEVEL, ...) 
\ do { \ diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h index 0fa8463b546..6e1fe5739be 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h @@ -52,7 +52,8 @@ boost::optional AsInt32(int32_t min_value, const Connection::ConnPropertyMap& connPropertyMap, const std::string_view& property_name); -void ReadConfigFile(PropertyMap& properties, const std::string& configFileName); +void ReadConfigFile(PropertyMap& properties, const std::string& configPath, + const std::string& configFileName); } // namespace odbcabstraction } // namespace driver diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc index 45dfbcf2e42..6feb7ff3be2 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/utils.cc @@ -81,9 +81,8 @@ std::string GetModulePath() { return std::string(path.begin(), path.begin() + dirname_length); } -void ReadConfigFile(PropertyMap& properties, const std::string& config_file_name) { - auto config_path = GetModulePath(); - +void ReadConfigFile(PropertyMap& properties, const std::string& config_path, + const std::string& config_file_name) { std::ifstream config_file; auto config_file_path = config_path + "/" + config_file_name; config_file.open(config_file_path); From 88c394476209f997fc93b33645cccdd22ca43417 Mon Sep 17 00:00:00 2001 From: rscales Date: Wed, 11 Jun 2025 09:47:05 -0700 Subject: [PATCH 12/63] Implement ODBC API with debug logging --- cpp/src/arrow/flight/sql/odbc/entry_points.cc | 219 +++++++++++++++++- 1 file changed, 218 insertions(+), 1 deletion(-) diff --git a/cpp/src/arrow/flight/sql/odbc/entry_points.cc b/cpp/src/arrow/flight/sql/odbc/entry_points.cc index 46536b84e9b..912a544e22d 100644 --- a/cpp/src/arrow/flight/sql/odbc/entry_points.cc +++ b/cpp/src/arrow/flight/sql/odbc/entry_points.cc @@ -27,6 +27,8 @@ #include "arrow/flight/sql/odbc/odbc_api.h" #include "arrow/flight/sql/odbc/visibility.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/logger.h" + SQLRETURN SQL_API SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result) { return arrow::SQLAllocHandle(type, parent, result); } @@ -79,7 +81,8 @@ SQLRETURN SQL_API SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePt SQLRETURN SQL_API SQLSetConnectAttrW(SQLHDBC conn, SQLINTEGER attr, SQLPOINTER value, SQLINTEGER valueLen) { - // TODO implement SQLSetConnectAttr + LOG_DEBUG("SQLSetConnectAttrW called with conn: {}, attr: {}, value: {}, valueLen: {}", + conn, attr, value, valueLen); return SQL_ERROR; } @@ -109,3 +112,217 @@ SQLRETURN SQL_API SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNa } SQLRETURN SQL_API SQLDisconnect(SQLHDBC conn) { return arrow::SQLDisconnect(conn); } + +SQLRETURN SQL_API SQLBindCol(SQLHSTMT statementHandle, SQLUSMALLINT columnNumber, + SQLSMALLINT targetType, SQLPOINTER targetValuePtr, + SQLLEN bufferLength, SQLLEN* strLen_or_IndPtr) { + LOG_DEBUG( + "SQLBindCol called with statementHandle: {}, columnNumber: {}, targetType: {}, " + "targetValuePtr: {}, bufferLength: {}, strLen_or_IndPtr: {}", + statementHandle, columnNumber, targetType, targetValuePtr, bufferLength, + fmt::ptr(strLen_or_IndPtr)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLCancel(SQLHSTMT 
statementHandle) { + LOG_DEBUG("SQLCancel called with statementHandle: {}", statementHandle); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLCloseCursor(SQLHSTMT statementHandle) { + LOG_DEBUG("SQLCloseCursor called with statementHandle: {}", statementHandle); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLColAttributeW(SQLHSTMT statementHandle, SQLUSMALLINT columnNumber, + SQLUSMALLINT fieldIdentifier, + SQLPOINTER characterAttributePtr, + SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr, + SQLLEN* numericAttributePtr) { + LOG_DEBUG( + "SQLColAttributeW called with statementHandle: {}, columnNumber: {}, " + "fieldIdentifier: {}, characterAttributePtr: {}, bufferLength: {}, " + "stringLengthPtr: {}, numericAttributePtr: {}", + statementHandle, columnNumber, fieldIdentifier, characterAttributePtr, bufferLength, + fmt::ptr(stringLengthPtr), fmt::ptr(numericAttributePtr)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLColumnsW(SQLHSTMT statementHandle, SQLWCHAR* catalogName, + SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, + SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, + SQLSMALLINT tableNameLength, SQLWCHAR* columnName, + SQLSMALLINT columnNameLength) { + LOG_DEBUG( + "SQLColumnsW called with statementHandle: {}, catalogName: {}, catalogNameLength: " + "{}, " + "schemaName: {}, schemaNameLength: {}, tableName: {}, tableNameLength: {}, " + "columnName: {}, " + "columnNameLength: {}", + statementHandle, fmt::ptr(catalogName), catalogNameLength, fmt::ptr(schemaName), + schemaNameLength, fmt::ptr(tableName), tableNameLength, fmt::ptr(columnName), + columnNameLength); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLErrorW(SQLHENV handleType, SQLHDBC handle, SQLHSTMT hstmt, + SQLWCHAR FAR* szSqlState, SQLINTEGER FAR* pfNativeError, + SQLWCHAR FAR* szErrorMsg, SQLSMALLINT cbErrorMsgMax, + SQLSMALLINT FAR* pcbErrorMsg) { + LOG_DEBUG( + "SQLErrorW called with handleType: {}, handle: {}, hstmt: {}, szSqlState: {}, " + "pfNativeError: {}, szErrorMsg: {}, cbErrorMsgMax: {}, pcbErrorMsg: {}", + handleType, handle, hstmt, fmt::ptr(szSqlState), fmt::ptr(pfNativeError), + fmt::ptr(szErrorMsg), cbErrorMsgMax, fmt::ptr(pcbErrorMsg)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLExecDirectW(SQLHSTMT statementHandle, SQLWCHAR* statementText, + SQLINTEGER textLength) { + LOG_DEBUG( + "SQLExecDirectW called with statementHandle: {}, statementText: {}, textLength: {}", + statementHandle, fmt::ptr(statementText), textLength); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLExecute(SQLHSTMT statementHandle) { + LOG_DEBUG("SQLExecute called with statementHandle: {}", statementHandle); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLFetch(SQLHSTMT statementHandle) { + LOG_DEBUG("SQLFetch called with statementHandle: {}", statementHandle); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLForeignKeysW(SQLHSTMT statementHandle, SQLWCHAR* pKCatalogName, + SQLSMALLINT pKCatalogNameLength, SQLWCHAR* pKSchemaName, + SQLSMALLINT pKSchemaNameLength, SQLWCHAR* pKTableName, + SQLSMALLINT pKTableNameLength, SQLWCHAR* fKCatalogName, + SQLSMALLINT fKCatalogNameLength, SQLWCHAR* fKSchemaName, + SQLSMALLINT fKSchemaNameLength, SQLWCHAR* fKTableName, + SQLSMALLINT fKTableNameLength) { + LOG_DEBUG( + "SQLForeignKeysW called with statementHandle: {}, pKCatalogName: {}, " + "pKCatalogNameLength: " + "{}, pKSchemaName: {}, pKSchemaNameLength: {}, pKTableName: {}, pKTableNameLength: " + "{}, " + "fKCatalogName: {}, fKCatalogNameLength: {}, fKSchemaName: {}, fKSchemaNameLength: " + "{}, " + "fKTableName: {}, 
fKTableNameLength : {}", + statementHandle, fmt::ptr(pKCatalogName), pKCatalogNameLength, + fmt::ptr(pKSchemaName), pKSchemaNameLength, fmt::ptr(pKTableName), + pKTableNameLength, fmt::ptr(fKCatalogName), fKCatalogNameLength, + fmt::ptr(fKSchemaName), fKSchemaNameLength, fmt::ptr(fKTableName), + fKTableNameLength); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLGetConnectAttrW(SQLHDBC connectionHandle, SQLINTEGER attribute, + SQLPOINTER valuePtr, SQLINTEGER bufferLength, + SQLINTEGER* stringLengthPtr) { + LOG_DEBUG( + "SQLGetConnectAttrW called with connectionHandle: {}, attribute: {}, valuePtr: {}, " + "bufferLength: {}, stringLengthPtr: {}", + connectionHandle, attribute, valuePtr, bufferLength, fmt::ptr(stringLengthPtr)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLGetData(SQLHSTMT statementHandle, SQLUSMALLINT col_or_Param_Num, + SQLSMALLINT targetType, SQLPOINTER targetValuePtr, + SQLLEN bufferLength, SQLLEN* strLen_or_IndPtr) { + LOG_DEBUG( + "SQLGetData called with statementHandle: {}, col_or_Param_Num: {}, targetType: {}, " + "targetValuePtr: {}, bufferLength: {}, strLen_or_IndPtr: {}", + statementHandle, col_or_Param_Num, targetType, targetValuePtr, bufferLength, + fmt::ptr(strLen_or_IndPtr)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLGetStmtAttrW(SQLHSTMT statementHandle, SQLINTEGER attribute, + SQLPOINTER valuePtr, SQLINTEGER bufferLength, + SQLINTEGER* stringLengthPtr) { + LOG_DEBUG( + "SQLGetStmtAttrW called with statementHandle: {}, attribute: {}, valuePtr: {}, " + "bufferLength: {}, stringLengthPtr: {}", + statementHandle, attribute, valuePtr, bufferLength, fmt::ptr(stringLengthPtr)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLGetTypeInfoW(SQLHSTMT statementHandle, SQLSMALLINT dataType) { + LOG_DEBUG("SQLGetTypeInfoW called with statementHandle: {} dataType: {}", + statementHandle, dataType); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLMoreResults(SQLHSTMT statementHandle) { + LOG_DEBUG("SQLMoreResults called with statementHandle: {}", statementHandle); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLNativeSqlW(SQLHDBC connectionHandle, SQLWCHAR* inStatementText, + SQLINTEGER inStatementTextLength, + SQLWCHAR* outStatementText, SQLINTEGER bufferLength, + SQLINTEGER* outStatementTextLength) { + LOG_DEBUG( + "SQLNativeSqlW called with connectionHandle: {}, inStatementText: {}, " + "inStatementTextLength: " + "{}, outStatementText: {}, bufferLength: {}, outStatementTextLength: {}", + connectionHandle, fmt::ptr(inStatementText), inStatementTextLength, + fmt::ptr(outStatementText), bufferLength, fmt::ptr(outStatementTextLength)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLNumResultCols(SQLHSTMT statementHandle, + SQLSMALLINT* columnCountPtr) { + LOG_DEBUG("SQLNumResultCols called with statementHandle: {}, columnCountPtr: {}", + statementHandle, fmt::ptr(columnCountPtr)); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLPrepareW(SQLHSTMT statementHandle, SQLWCHAR* statementText, + SQLINTEGER textLength) { + LOG_DEBUG( + "SQLPrepareW called with statementHandle: {}, statementText: {}, textLength: {}", + statementHandle, fmt::ptr(statementText), textLength); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLPrimaryKeysW(SQLHSTMT statementHandle, SQLWCHAR* catalogName, + SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, + SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, + SQLSMALLINT tableNameLength) { + LOG_DEBUG( + "SQLPrimaryKeysW called with statementHandle: {}, catalogName: {}, " + "catalogNameLength: " + "{}, schemaName: {}, schemaNameLength: {}, tableName: 
{}, tableNameLength: {}", + statementHandle, fmt::ptr(catalogName), catalogNameLength, fmt::ptr(schemaName), + schemaNameLength, fmt::ptr(tableName), tableNameLength); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLSetStmtAttrW(SQLHSTMT statementHandle, SQLINTEGER attribute, + SQLPOINTER valuePtr, SQLINTEGER stringLength) { + LOG_DEBUG( + "SQLSetStmtAttrW called with statementHandle: {}, attribute: {}, valuePtr: {}, " + "stringLength: {}", + statementHandle, attribute, valuePtr, stringLength); + return SQL_ERROR; +} + +SQLRETURN SQL_API SQLTablesW(SQLHSTMT statementHandle, SQLWCHAR* catalogName, + SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, + SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, + SQLSMALLINT tableNameLength, SQLWCHAR* tableType, + SQLSMALLINT tableTypeLength) { + LOG_DEBUG( + "SQLTablesW called with statementHandle: {}, catalogName: {}, catalogNameLength: " + "{}, " + "schemaName: {}, schemaNameLength: {}, tableName: {}, tableNameLength: {}, " + "tableType: {}, " + "tableTypeLength: {}", + statementHandle, fmt::ptr(catalogName), catalogNameLength, fmt::ptr(schemaName), + schemaNameLength, fmt::ptr(tableName), tableNameLength, fmt::ptr(tableType), + tableTypeLength); + return SQL_ERROR; +} From 8853731805d8c67793f479c48465c34ae09d84b1 Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" <96995091+alinaliBQ@users.noreply.github.com> Date: Thu, 12 Jun 2025 09:35:40 -0700 Subject: [PATCH 13/63] Enable mock test (#42) * Add todo for noauth validation * mock server with token auth Add tests * run same test with both modes * Enable ODBC tests in workflow * Switch current test cases to use FlightSQLODBCTestBase So the tests can be skipped when `TEST_CONNECT_STR` is not set. * Change tests to run on both mock and remote modes Wrap usage of TEST_CONNECT_STR where possible * Rename test fixtures and make connection string functions virtual * Fix lint issue * Attempt to enable ODBC build on Windows platforms * Attempt to fix clang64 and MinGW errors * Attempt to register ODBC * Address James' comments Use constant string for token * use ServerMiddleware to validate token --- .github/workflows/cpp.yml | 4 + ci/scripts/cpp_test.sh | 2 - cpp/cmake_modules/DefineOptions.cmake | 2 +- .../odbc/flight_sql/flight_sql_auth_method.cc | 4 + .../flight/sql/odbc/flight_sql/system_dsn.h | 2 +- .../flight/sql/odbc/tests/CMakeLists.txt | 12 ++ .../flight/sql/odbc/tests/connection_test.cc | 111 ++++++++---------- .../flight/sql/odbc/tests/odbc_test_suite.cc | 97 ++++++++++++++- .../flight/sql/odbc/tests/odbc_test_suite.h | 83 ++++++++++++- 9 files changed, 248 insertions(+), 69 deletions(-) diff --git a/.github/workflows/cpp.yml b/.github/workflows/cpp.yml index 8c4388fc0f9..70332773302 100644 --- a/.github/workflows/cpp.yml +++ b/.github/workflows/cpp.yml @@ -474,6 +474,10 @@ jobs: PIPX_BASE_PYTHON: ${{ steps.python-install.outputs.python-path }} run: | ci/scripts/install_gcs_testbench.sh default + - name: Register Flight SQL ODBC Driver + shell: cmd + run: | + call "cpp\src\arrow\flight\sql\odbc\install\install_amd64.cmd" ${{github.workspace}}\build\cpp\%ARROW_BUILD_TYPE%\libarrow_flight_sql_odbc.dll - name: Test shell: msys2 {0} run: | diff --git a/ci/scripts/cpp_test.sh b/ci/scripts/cpp_test.sh index 05885ce4018..e646ba964a5 100755 --- a/ci/scripts/cpp_test.sh +++ b/ci/scripts/cpp_test.sh @@ -73,8 +73,6 @@ case "$(uname)" in exclude_tests="${exclude_tests}|gandiva-precompiled-test" exclude_tests="${exclude_tests}|gandiva-projector-test" exclude_tests="${exclude_tests}|gandiva-utf8-test" - # TODO: 
Enable ODBC tests - exclude_tests="${exclude_tests}|arrow-connection-test" ctest_options+=(--exclude-regex "${exclude_tests}") ;; *) diff --git a/cpp/cmake_modules/DefineOptions.cmake b/cpp/cmake_modules/DefineOptions.cmake index 581c30e1aaa..05957d4b275 100644 --- a/cpp/cmake_modules/DefineOptions.cmake +++ b/cpp/cmake_modules/DefineOptions.cmake @@ -108,7 +108,7 @@ endmacro() macro(resolve_option_dependencies) # Arrow Flight SQL ODBC is available only for Windows for now. - if(NOT MSVC_TOOLCHAIN) + if(NOT WIN32) set(ARROW_FLIGHT_SQL_ODBC OFF) endif() if(MSVC_TOOLCHAIN) diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc index 3fcc3a87162..4b66c30dab0 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc @@ -45,6 +45,10 @@ class NoOpAuthMethod : public FlightSqlAuthMethod { void Authenticate(FlightSqlConnection& connection, FlightCallOptions& call_options) override { // Do nothing + + // TODO: implement NoOpAuthMethod to validate server address. + // Can use NoOpClientAuthHandler. + // https://github.com/apache/arrow/issues/46733 } }; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h index f5470693eea..f3744d3428a 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h @@ -40,7 +40,7 @@ bool DisplayConnectionWindow(void* windowParent, Configuration& config); * @param windowParent Parent window handle. * @param config Output configuration, presumed to be empty, it will be using values from * properties. - * @param config Output properties. + * @param properties Output properties. * @return True on success and false on fail. 
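 * @note Unlike the two-argument overload above, this overload also returns
 *       the collected values through the properties map.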
*/ bool DisplayConnectionWindow(void* windowParent, Configuration& config, diff --git a/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt index 1d0dce0bec4..41e51182275 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt @@ -21,13 +21,25 @@ include_directories(${ODBC_INCLUDE_DIRS}) add_definitions(-DUNICODE=1) +find_package(SQLite3Alt REQUIRED) + +set(ARROW_FLIGHT_SQL_MOCK_SERVER_SRCS + ../../example/sqlite_sql_info.cc + ../../example/sqlite_type_info.cc + ../../example/sqlite_statement.cc + ../../example/sqlite_statement_batch_reader.cc + ../../example/sqlite_server.cc + ../../example/sqlite_tables_schema_batch_reader.cc) + add_arrow_test(connection_test SOURCES connection_test.cc odbc_test_suite.cc odbc_test_suite.h + ${ARROW_FLIGHT_SQL_MOCK_SERVER_SRCS} EXTRA_LINK_LIBS ${ODBC_LIBRARIES} ${ODBCINST} + ${SQLite3_LIBRARIES} arrow_odbc_spi_impl odbcabstraction) diff --git a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc index b2866b3e5a7..8461ead6cf9 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc @@ -200,49 +200,51 @@ TEST(SQLSetEnvAttr, TestSQLSetEnvAttrODBCVersionInvalid) { EXPECT_TRUE(return_set == SQL_ERROR); } -TEST_F(FlightSQLODBCTestBase, TestSQLGetEnvAttrOutputNTS) { - connect(); +TYPED_TEST(FlightSQLODBCTestBase, TestSQLGetEnvAttrOutputNTS) { + this->connect(); SQLINTEGER output_nts; - SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_OUTPUT_NTS, &output_nts, 0, 0); + SQLRETURN return_get = SQLGetEnvAttr(this->env, SQL_ATTR_OUTPUT_NTS, &output_nts, 0, 0); EXPECT_TRUE(return_get == SQL_SUCCESS); EXPECT_EQ(output_nts, SQL_TRUE); - disconnect(); + this->disconnect(); } -TEST_F(FlightSQLODBCTestBase, TestSQLGetEnvAttrGetLength) { +TYPED_TEST(FlightSQLODBCTestBase, TestSQLGetEnvAttrGetLength) { // Test is disabled because call to SQLGetEnvAttr is handled by the driver manager on - // Windows. This test case can be potentionally used on macOS/Linux + // Windows. This test case can be potentially used on macOS/Linux GTEST_SKIP(); - connect(); + this->connect(); SQLINTEGER length; - SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0, &length); + SQLRETURN return_get = + SQLGetEnvAttr(this->env, SQL_ATTR_ODBC_VERSION, nullptr, 0, &length); EXPECT_TRUE(return_get == SQL_SUCCESS); EXPECT_EQ(length, sizeof(SQLINTEGER)); - disconnect(); + this->disconnect(); } -TEST_F(FlightSQLODBCTestBase, TestSQLGetEnvAttrNullValuePointer) { +TYPED_TEST(FlightSQLODBCTestBase, TestSQLGetEnvAttrNullValuePointer) { // Test is disabled because call to SQLGetEnvAttr is handled by the driver manager on - // Windows. This test case can be potentionally used on macOS/Linux + // Windows. 
This test case can be potentially used on macOS/Linux GTEST_SKIP(); - connect(); + this->connect(); - SQLRETURN return_get = SQLGetEnvAttr(env, SQL_ATTR_ODBC_VERSION, nullptr, 0, nullptr); + SQLRETURN return_get = + SQLGetEnvAttr(this->env, SQL_ATTR_ODBC_VERSION, nullptr, 0, nullptr); EXPECT_TRUE(return_get == SQL_ERROR); - disconnect(); + this->disconnect(); } TEST(SQLSetEnvAttr, TestSQLSetEnvAttrOutputNTSValid) { @@ -292,7 +294,7 @@ TEST(SQLSetEnvAttr, TestSQLSetEnvAttrNullValuePointer) { EXPECT_TRUE(return_set == SQL_ERROR); } -TEST(SQLDriverConnect, TestSQLDriverConnect) { +TYPED_TEST(FlightSQLODBCTestBase, TestSQLDriverConnect) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -312,8 +314,7 @@ TEST(SQLDriverConnect, TestSQLDriverConnect) { EXPECT_TRUE(ret == SQL_SUCCESS); // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + std::string connect_str = this->getConnectionString(); ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, arrow::util::UTF8ToWideString(connect_str)); std::vector connect_str0(wconnect_str.begin(), wconnect_str.end()); @@ -361,7 +362,7 @@ TEST(SQLDriverConnect, TestSQLDriverConnect) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLDriverConnect, TestSQLDriverConnectInvalidUid) { +TEST_F(FlightSQLODBCRemoteTestBase, TestSQLDriverConnectInvalidUid) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -380,11 +381,8 @@ TEST(SQLDriverConnect, TestSQLDriverConnectInvalidUid) { EXPECT_TRUE(ret == SQL_SUCCESS); - // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); - // Append invalid uid to connection string - connect_str += std::string("uid=non_existent_id;"); + // Invalid connect string + std::string connect_str = getInvalidConnectionString(); ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, arrow::util::UTF8ToWideString(connect_str)); @@ -418,7 +416,7 @@ TEST(SQLDriverConnect, TestSQLDriverConnectInvalidUid) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLConnect, TestSQLConnect) { +TYPED_TEST(FlightSQLODBCTestBase, TestSQLConnect) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -438,8 +436,7 @@ TEST(SQLConnect, TestSQLConnect) { EXPECT_TRUE(ret == SQL_SUCCESS); // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + std::string connect_str = this->getConnectionString(); // Write connection string content into a DSN, // must succeed before continuing @@ -454,7 +451,7 @@ TEST(SQLConnect, TestSQLConnect) { std::vector uid0(wuid.begin(), wuid.end()); std::vector pwd0(wpwd.begin(), wpwd.end()); - // Connecting to ODBC server. + // Connecting to ODBC server. Empty uid and pwd should be ignored. 
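// Note on the call below: SQLConnectW takes counted SQLWCHAR buffers, so the
// lengths are in characters rather than bytes (SQL_NTS would also be accepted
// for null-terminated strings). As a TYPED_TEST, this body runs once per
// fixture in TestTypes, i.e. against the mock server and, when
// TEST_CONNECT_STR is set, against the remote server as well.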
ret = SQLConnect(conn, dsn0.data(), static_cast(dsn0.size()), uid0.data(), static_cast(uid0.size()), pwd0.data(), static_cast(pwd0.size())); @@ -488,7 +485,7 @@ TEST(SQLConnect, TestSQLConnect) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLConnect, TestSQLConnectInputUidPwd) { +TEST_F(FlightSQLODBCRemoteTestBase, TestSQLConnectInputUidPwd) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -508,10 +505,9 @@ TEST(SQLConnect, TestSQLConnectInputUidPwd) { EXPECT_TRUE(ret == SQL_SUCCESS); // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + std::string connect_str = getConnectionString(); - // Retrieve valid uid and pwd + // Retrieve valid uid and pwd, assumes TEST_CONNECT_STR contains uid and pwd Connection::ConnPropertyMap properties; ODBC::ODBCConnection::getPropertiesFromConnString(connect_str, properties); std::string uid_key("uid"); @@ -567,7 +563,7 @@ TEST(SQLConnect, TestSQLConnectInputUidPwd) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLConnect, TestSQLConnectInvalidUid) { +TEST_F(FlightSQLODBCRemoteTestBase, TestSQLConnectInvalidUid) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -587,10 +583,9 @@ TEST(SQLConnect, TestSQLConnectInvalidUid) { EXPECT_TRUE(ret == SQL_SUCCESS); // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + std::string connect_str = getConnectionString(); - // Retrieve valid uid and pwd + // Retrieve valid uid and pwd, assumes TEST_CONNECT_STR contains uid and pwd Connection::ConnPropertyMap properties; ODBC::ODBCConnection::getPropertiesFromConnString(connect_str, properties); std::string uid = properties[std::string("uid")]; @@ -636,7 +631,7 @@ TEST(SQLConnect, TestSQLConnectInvalidUid) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLConnect, TestSQLConnectDSNPrecedence) { +TEST_F(FlightSQLODBCRemoteTestBase, TestSQLConnectDSNPrecedence) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -656,13 +651,13 @@ TEST(SQLConnect, TestSQLConnectDSNPrecedence) { EXPECT_TRUE(ret == SQL_SUCCESS); // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); + std::string connect_str = getConnectionString(); // Write connection string content into a DSN, // must succeed before continuing - // Pass incorrect uid and password to SQLConnect, they will be ignored + // Pass incorrect uid and password to SQLConnect, they will be ignored. 
+ // Assumes TEST_CONNECT_STR contains uid and pwd std::string uid("non_existent_id"), pwd("non_existent_password"); ASSERT_TRUE(writeDSN(connect_str)); @@ -746,7 +741,7 @@ TEST(SQLDisconnect, TestSQLDisconnectWithoutConnection) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailure) { +TYPED_TEST(FlightSQLODBCTestBase, TestSQLGetDiagFieldWForConnectFailure) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -765,11 +760,8 @@ TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailure) { EXPECT_TRUE(ret == SQL_SUCCESS); - // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); - // Append invalid uid to connection string - connect_str += std::string("uid=non_existent_id;"); + // Invalid connect string + std::string connect_str = this->getInvalidConnectionString(); ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, arrow::util::UTF8ToWideString(connect_str)); @@ -859,9 +851,9 @@ TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailure) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailureNTS) { +TYPED_TEST(FlightSQLODBCTestBase, TestSQLGetDiagFieldWForConnectFailureNTS) { // Test is disabled because driver manager on Windows does not pass through SQL_NTS - // This test case can be potentionally used on macOS/Linux + // This test case can be potentially used on macOS/Linux GTEST_SKIP(); // ODBC Environment SQLHENV env; @@ -881,11 +873,8 @@ TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailureNTS) { EXPECT_TRUE(ret == SQL_SUCCESS); - // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); - // Append invalid uid to connection string - connect_str += std::string("uid=non_existent_id;"); + // Invalid connect string + std::string connect_str = this->getInvalidConnectionString(); ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, arrow::util::UTF8ToWideString(connect_str)); @@ -902,7 +891,6 @@ TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailureNTS) { EXPECT_TRUE(ret == SQL_ERROR); // Retrieve all supported header level and record level data - SQLSMALLINT HEADER_LEVEL = 0; SQLSMALLINT RECORD_1 = 1; // SQL_DIAG_MESSAGE_TEXT SQL_NTS @@ -929,7 +917,7 @@ TEST(SQLGetDiagFieldW, TestSQLGetDiagFieldWForConnectFailureNTS) { EXPECT_TRUE(ret == SQL_SUCCESS); } -TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { +TYPED_TEST(FlightSQLODBCTestBase, TestSQLGetDiagRecForConnectFailure) { // ODBC Environment SQLHENV env; SQLHDBC conn; @@ -948,11 +936,8 @@ TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { EXPECT_TRUE(ret == SQL_SUCCESS); - // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); - // Append invalid uid to connection string - connect_str += std::string("uid=non_existent_id;"); + // Invalid connect string + std::string connect_str = this->getInvalidConnectionString(); ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, arrow::util::UTF8ToWideString(connect_str)); @@ -978,7 +963,7 @@ TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { EXPECT_TRUE(ret == SQL_SUCCESS); - EXPECT_GT(message_length, 200); + EXPECT_GT(message_length, 120); EXPECT_EQ(native_error, 200); @@ -1000,6 +985,12 @@ TEST(SQLGetDiagRec, TestSQLGetDiagRecForConnectFailure) { EXPECT_TRUE(ret == SQL_SUCCESS); } +TYPED_TEST(FlightSQLODBCTestBase, TestConnect) { + // Verifies connect and disconnect works on its own + this->connect(); + 
this->disconnect(); +} + } // namespace integration_tests } // namespace odbc } // namespace flight diff --git a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc index d60bcea19e4..656c221a1d1 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc @@ -29,7 +29,11 @@ namespace arrow { namespace flight { namespace odbc { namespace integration_tests { -void FlightSQLODBCTestBase::connect() { +void FlightSQLODBCRemoteTestBase::connect() { + std::string connect_str = getConnectionString(); + connectWithString(connect_str); +} +void FlightSQLODBCRemoteTestBase::connectWithString(std::string connect_str) { // Allocate an environment handle SQLRETURN ret = SQLAllocEnv(&env); @@ -45,8 +49,6 @@ void FlightSQLODBCTestBase::connect() { EXPECT_TRUE(ret == SQL_SUCCESS); // Connect string - ASSERT_OK_AND_ASSIGN(std::string connect_str, - arrow::internal::GetEnvVar(TEST_CONNECT_STR)); std::vector connect_str0(connect_str.begin(), connect_str.end()); SQLWCHAR outstr[ODBC_BUFFER_SIZE]; @@ -65,7 +67,7 @@ void FlightSQLODBCTestBase::connect() { ASSERT_TRUE(ret == SQL_SUCCESS); } -void FlightSQLODBCTestBase::disconnect() { +void FlightSQLODBCRemoteTestBase::disconnect() { // Disconnect from ODBC SQLRETURN ret = SQLDisconnect(conn); @@ -86,6 +88,93 @@ void FlightSQLODBCTestBase::disconnect() { EXPECT_TRUE(ret == SQL_SUCCESS); } +std::string FlightSQLODBCRemoteTestBase::getConnectionString() { + std::string connect_str = arrow::internal::GetEnvVar(TEST_CONNECT_STR).ValueOrDie(); + return connect_str; +} + +std::string FlightSQLODBCRemoteTestBase::getInvalidConnectionString() { + std::string connect_str = getConnectionString(); + // Append invalid uid to connection string + connect_str += std::string("uid=non_existent_id;"); + return connect_str; +} + +void FlightSQLODBCRemoteTestBase::SetUp() { + if (arrow::internal::GetEnvVar(TEST_CONNECT_STR).ValueOr("").empty()) { + GTEST_SKIP() << "Skipping FlightSQLODBCRemoteTestBase test: TEST_CONNECT_STR not set"; + } +} + +std::string FindTokenInCallHeaders(const CallHeaders& incoming_headers) { + // Lambda function to compare characters without case sensitivity. 
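// (The "Bearer" scheme is matched case-insensitively, consistent with
// RFC 7235's rule that authentication scheme names are case-insensitive.)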
+ auto char_compare = [](const char& char1, const char& char2) { + return (::toupper(char1) == ::toupper(char2)); + }; + + const std::string auth_val(incoming_headers.find(kAuthHeader)->second); + std::string bearer_token(""); + if (auth_val.size() > kBearerPrefix.length()) { + if (std::equal(auth_val.begin(), auth_val.begin() + kBearerPrefix.length(), + kBearerPrefix.begin(), char_compare)) { + bearer_token = auth_val.substr(kBearerPrefix.length()); + } + } + return bearer_token; +} + +void MockServerMiddleware::SendingHeaders(AddCallHeaders* outgoing_headers) { + std::string bearer_token = FindTokenInCallHeaders(incoming_headers_); + *isValid_ = (bearer_token == std::string(test_token)); +} + +Status MockServerMiddlewareFactory::StartCall( + const CallInfo& info, const ServerCallContext& context, + std::shared_ptr* middleware) { + std::string bearer_token = FindTokenInCallHeaders(context.incoming_headers()); + if (bearer_token == std::string(test_token)) { + *middleware = + std::make_shared(context.incoming_headers(), &isValid_); + } else { + return MakeFlightError(FlightStatusCode::Unauthenticated, + "Invalid token for mock server"); + } + + return Status::OK(); +} + +std::string FlightSQLODBCMockTestBase::getConnectionString() { + std::string connect_str( + "driver={Apache Arrow Flight SQL ODBC Driver};HOST=localhost;port=" + + std::to_string(port) + ";token=" + std::string(test_token) + + ";useEncryption=false;"); + return connect_str; +} + +std::string FlightSQLODBCMockTestBase::getInvalidConnectionString() { + std::string connect_str = getConnectionString(); + // Append invalid token to connection string + connect_str += std::string("token=invalid_token;"); + return connect_str; +} + +void FlightSQLODBCMockTestBase::SetUp() { + ASSERT_OK_AND_ASSIGN(auto location, Location::ForGrpcTcp("0.0.0.0", 0)); + arrow::flight::FlightServerOptions options(location); + options.auth_handler = std::make_unique(); + options.middleware.push_back( + {"bearer-auth-server", std::make_shared()}); + ASSERT_OK_AND_ASSIGN(server, + arrow::flight::sql::example::SQLiteFlightSqlServer::Create()); + ASSERT_OK(server->Init(options)); + + port = server->port(); + ASSERT_OK_AND_ASSIGN(location, Location::ForGrpcTcp("localhost", port)); + ASSERT_OK_AND_ASSIGN(auto client, arrow::flight::FlightClient::Connect(location)); +} + +void FlightSQLODBCMockTestBase::TearDown() { ASSERT_OK(server->Shutdown()); } + bool compareConnPropertyMap(Connection::ConnPropertyMap map1, Connection::ConnPropertyMap map2) { if (map1.size() != map2.size()) return false; diff --git a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h index 49ab2e20f44..168204e8d0b 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h +++ b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.h @@ -19,6 +19,9 @@ #include "arrow/util/io_util.h" #include "arrow/util/utf8.h" +#include "arrow/flight/server_middleware.h" +#include "arrow/flight/sql/client.h" +#include "arrow/flight/sql/example/sqlite_server.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h" #ifdef _WIN32 @@ -29,6 +32,8 @@ #include #include +#include + #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" // For DSN registration @@ -43,13 +48,20 @@ namespace odbc { namespace integration_tests { using driver::odbcabstraction::Connection; -class FlightSQLODBCTestBase : public ::testing::Test { +class FlightSQLODBCRemoteTestBase : 
public ::testing::Test { public: /// \brief Connect to Arrow Flight SQL server using connection string defined in /// environment variable "ARROW_FLIGHT_SQL_ODBC_CONN" void connect(); + /// \brief Connect to Arrow Flight SQL server using connection string + void connectWithString(std::string connection_str); /// \brief Disconnect from server void disconnect(); + /// \brief Get connection string from environment variable "ARROW_FLIGHT_SQL_ODBC_CONN" + std::string virtual getConnectionString(); + /// \brief Get invalid connection string based on connection string defined in + /// environment variable "ARROW_FLIGHT_SQL_ODBC_CONN" + std::string virtual getInvalidConnectionString(); /** ODBC Environment. */ SQLHENV env; @@ -59,8 +71,77 @@ class FlightSQLODBCTestBase : public ::testing::Test { /** ODBC Statement. */ SQLHSTMT stmt; + + protected: + void SetUp() override; +}; + +static constexpr std::string_view kAuthHeader = "authorization"; +static constexpr std::string_view kBearerPrefix = "Bearer "; +static constexpr std::string_view test_token = "t0k3n"; + +std::string FindTokenInCallHeaders(const CallHeaders& incoming_headers); + +// A server middleware for validating incoming bearer header authentication. +class MockServerMiddleware : public ServerMiddleware { + public: + explicit MockServerMiddleware(const CallHeaders& incoming_headers, bool* isValid) + : isValid_(isValid) { + incoming_headers_ = incoming_headers; + } + + void SendingHeaders(AddCallHeaders* outgoing_headers) override; + + void CallCompleted(const Status& status) override {} + + std::string name() const override { return "MockServerMiddleware"; } + + private: + CallHeaders incoming_headers_; + bool* isValid_; +}; + +// Factory for base64 header authentication testing. +class MockServerMiddlewareFactory : public ServerMiddlewareFactory { + public: + MockServerMiddlewareFactory() : isValid_(false) {} + + Status StartCall(const CallInfo& info, const ServerCallContext& context, + std::shared_ptr* middleware) override; + + private: + bool isValid_; +}; + +class FlightSQLODBCMockTestBase : public FlightSQLODBCRemoteTestBase { + // Sets up a mock server for each test case + public: + /// \brief Get connection string for mock server + std::string getConnectionString() override; + /// \brief Get invalid connection string for mock server + std::string getInvalidConnectionString() override; + + int port; + + protected: + void SetUp() override; + + void TearDown() override; + + private: + std::shared_ptr server; }; +template +class FlightSQLODBCTestBase : public T { + public: + using List = std::list; +}; + +using TestTypes = + ::testing::Types; +TYPED_TEST_SUITE(FlightSQLODBCTestBase, TestTypes); + /** ODBC read buffer size. 
 /** ODBC read buffer size. */
 enum { ODBC_BUFFER_SIZE = 1024 };

From f0bf0d17c353e0f7f91af8c78b1b47167fb039fc Mon Sep 17 00:00:00 2001
From: "Alina (Xi) Li"
Date: Thu, 12 Jun 2025 13:11:15 -0700
Subject: [PATCH 14/63] Fix connection issues to DBT Labs

PopulateCallOptions before making a connection

Fix DSN window bug with advanced properties

Fix seg fault issue from empty string
---
 .../arrow/flight/sql/odbc/flight_sql/config/configuration.cc  | 4 ++--
 .../arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc | 3 ++-
 .../odbc/flight_sql/include/flight_sql/config/configuration.h | 2 +-
 .../flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc | 2 +-
 .../include/odbcabstraction/odbc_impl/encoding_utils.h        | 2 +-
 5 files changed, 7 insertions(+), 6 deletions(-)

diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc
index bfd050e724b..7cebebe56eb 100644
--- a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc
+++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc
@@ -170,12 +170,12 @@ const driver::odbcabstraction::Connection::ConnPropertyMap& Configuration::GetProperties()
   return this->properties;
 }
 
-std::vector<std::string_view> Configuration::GetCustomKeys() const {
+std::vector<std::string> Configuration::GetCustomKeys() const {
   driver::odbcabstraction::Connection::ConnPropertyMap copyProps(properties);
   for (auto& key : FlightSqlConnection::ALL_KEYS) {
     copyProps.erase(std::string(key));
   }
-  std::vector<std::string_view> keys;
+  std::vector<std::string> keys;
   boost::copy(copyProps | boost::adaptors::map_keys, std::back_inserter(keys));
   return keys;
 }

diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc
index 708ac2f81a4..54e712feffb 100644
--- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc
+++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc
@@ -175,6 +175,8 @@ void FlightSqlConnection::Connect(const ConnPropertyMap& properties,
   std::unique_ptr<FlightClient> flight_client;
   ThrowIfNotOK(FlightClient::Connect(location, client_options).Value(&flight_client));
 
+  PopulateCallOptions(properties);
+
   std::unique_ptr<FlightSqlAuthMethod> auth_method =
       FlightSqlAuthMethod::FromProperties(flight_client, properties);
   auth_method->Authenticate(*this, call_options_);
@@ -190,7 +192,6 @@ void FlightSqlConnection::Connect(const ConnPropertyMap& properties,
   attribute_[CONNECTION_DEAD] = static_cast<uint32_t>(SQL_FALSE);
 
   PopulateMetadataSettings(properties);
-  PopulateCallOptions(properties);
 } catch (...) {
   attribute_[CONNECTION_DEAD] = static_cast<uint32_t>(SQL_TRUE);
   sql_client_.reset();

diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h
index c7c9cc5b894..8c4d6865505 100644
--- a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h
+++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h
@@ -59,7 +59,7 @@ class Configuration {
    */
   const driver::odbcabstraction::Connection::ConnPropertyMap& GetProperties() const;
 
-  std::vector<std::string_view> GetCustomKeys() const;
+  std::vector<std::string> GetCustomKeys() const;
 
  private:
   driver::odbcabstraction::Connection::ConnPropertyMap properties;

diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc
index c47984ca400..e469b6b067f 100644
--- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc
+++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc
@@ -306,7 +306,7 @@ int DsnConfigurationWindow::CreatePropertiesGroup(int posX, int posY, int sizeX)
   const auto keys = config.GetCustomKeys();
   for (const auto& key : keys) {
-    propertyList->ListAddItem({std::string(key), config.Get(key)});
+    propertyList->ListAddItem({key, config.Get(key)});
   }
 
   SendMessage(propertyList->GetHandle(), LVM_SETEXTENDEDLISTVIEWSTYLE,

diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h
index 45ed8713626..3d6a80f835d 100644
--- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h
+++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h
@@ -83,7 +83,7 @@ inline size_t ConvertToSqlWChar(const std::string_view& str, SQLWCHAR* buffer,
 /// \param[in] msg_len Number of characters in wchar_msg
 /// \return wchar_msg in std::string format
 inline std::string SqlWcharToString(SQLWCHAR* wchar_msg, SQLSMALLINT msg_len = SQL_NTS) {
-  if (!wchar_msg) {
+  if (!wchar_msg || wchar_msg[0] == 0) {
     return std::string();
   }
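The encoding_utils.h hunk above is the "seg fault issue from empty string" fix:
SqlWcharToString now returns early for an empty wide string as well as for a
null pointer. A minimal sketch of the guard's behavior (illustrative buffers,
relying on the SQL_NTS default length):

  SQLWCHAR empty[] = {0};
  SqlWcharToString(nullptr);      // "" -- old guard, unchanged
  SqlWcharToString(empty);        // "" -- new: returns before any conversion
  SQLWCHAR hi[] = {'h', 'i', 0};
  SqlWcharToString(hi);           // "hi" -- non-empty input is unaffected
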
From 5ebbb8c858445589066164a38bfba0ac0400dfcc Mon Sep 17 00:00:00 2001
From: rscales
Date: Thu, 12 Jun 2025 15:44:23 -0700
Subject: [PATCH 15/63] Implement SQLAllocStmt
---
 cpp/src/arrow/flight/sql/odbc/entry_points.cc |  8 ++
 cpp/src/arrow/flight/sql/odbc/odbc_api.cc     | 76 +++++++++++++++-
 cpp/src/arrow/flight/sql/odbc/odbc_api.h      |  1 +
 .../flight/sql/odbc/tests/connection_test.cc  | 87 +++++++++++++++++++
 .../flight/sql/odbc/tests/odbc_test_suite.cc  | 12 ++-
 5 files changed, 180 insertions(+), 4 deletions(-)

diff --git a/cpp/src/arrow/flight/sql/odbc/entry_points.cc b/cpp/src/arrow/flight/sql/odbc/entry_points.cc
index 912a544e22d..ef6aa2e5fd0 100644
--- a/cpp/src/arrow/flight/sql/odbc/entry_points.cc
+++ b/cpp/src/arrow/flight/sql/odbc/entry_points.cc
@@ -41,6 +41,10 @@ SQLRETURN SQL_API SQLAllocConnect(SQLHENV env, SQLHDBC* conn) {
   return arrow::SQLAllocHandle(SQL_HANDLE_DBC, env, conn);
 }
 
+SQLRETURN SQL_API SQLAllocStmt(SQLHDBC conn, SQLHSTMT* stmt) {
+  return arrow::SQLAllocHandle(SQL_HANDLE_STMT, conn, stmt);
+}
+
 SQLRETURN SQL_API SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) {
   return arrow::SQLFreeHandle(type, handle);
 }
@@ -53,6 +57,10 @@
 SQLRETURN SQL_API SQLFreeConnect(SQLHDBC conn) {
   return arrow::SQLFreeHandle(SQL_HANDLE_DBC,
conn); } +SQLRETURN SQL_API SQLFreeStmt(SQLHSTMT stmt, SQLUSMALLINT option) { + return arrow::SQLFreeStmt(stmt, option); +} + SQLRETURN SQL_API SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index b3deb11bbb7..9c734e072a9 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -26,6 +26,7 @@ #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/encoding_utils.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_environment.h" +#include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_statement.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spi/connection.h" #if defined _WIN32 || defined _WIN64 @@ -85,9 +86,30 @@ SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result) } case SQL_HANDLE_STMT: { - return SQL_INVALID_HANDLE; + using ODBC::ODBCConnection; + using ODBC::ODBCStatement; + + *result = SQL_NULL_HSTMT; + + ODBCConnection* connection = reinterpret_cast(parent); + + return ODBCConnection::ExecuteWithDiagnostics(connection, SQL_ERROR, [=]() { + std::shared_ptr statement = connection->createStatement(); + + if (statement) { + *result = reinterpret_cast(statement.get()); + + return SQL_SUCCESS; + } + + return SQL_ERROR; + }); } + // TODO Implement for case of descriptor + case SQL_HANDLE_DESC: + return SQL_INVALID_HANDLE; + default: break; } @@ -127,8 +149,19 @@ SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) { return SQL_SUCCESS; } - case SQL_HANDLE_STMT: - return SQL_INVALID_HANDLE; + case SQL_HANDLE_STMT: { + using ODBC::ODBCStatement; + + ODBCStatement* statement = reinterpret_cast(handle); + + if (!statement) { + return SQL_INVALID_HANDLE; + } + + statement->releaseStatement(); + + return SQL_SUCCESS; + } case SQL_HANDLE_DESC: return SQL_INVALID_HANDLE; @@ -140,6 +173,43 @@ SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle) { return SQL_ERROR; } +SQLRETURN SQLFreeStmt(SQLHSTMT handle, SQLUSMALLINT option) { + switch (option) { + case SQL_CLOSE: { + using ODBC::ODBCStatement; + + ODBCStatement* statement = reinterpret_cast(handle); + + return ODBCStatement::ExecuteWithDiagnostics(statement, SQL_ERROR, [=]() { + if (!statement) { + return SQL_INVALID_HANDLE; + } + + // Close cursor with suppressErrors set to true + statement->closeCursor(true); + + return SQL_SUCCESS; + }); + } + + case SQL_DROP: { + return SQLFreeHandle(SQL_HANDLE_STMT, handle); + } + + // TODO Implement SQLBindCol + case SQL_UNBIND: { + return SQL_SUCCESS; + } + + // SQLBindParameter is not supported + case SQL_RESET_PARAMS: { + return SQL_SUCCESS; + } + } + + return SQL_ERROR; +} + inline bool IsValidStringFieldArgs(SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr, bool isUnicode) { const SQLSMALLINT charSize = isUnicode ? 
GetSqlWCharSize() : sizeof(char); diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.h b/cpp/src/arrow/flight/sql/odbc/odbc_api.h index d5c392bcf59..9350cead384 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.h +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.h @@ -31,6 +31,7 @@ namespace arrow { SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result); SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle); +SQLRETURN SQLFreeStmt(SQLHSTMT stmt, SQLUSMALLINT option); SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, diff --git a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc index 8461ead6cf9..176129ba627 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc @@ -991,6 +991,93 @@ TYPED_TEST(FlightSQLODBCTestBase, TestConnect) { this->disconnect(); } +TYPED_TEST(FlightSQLODBCTestBase, TestSQLAllocFreeStmt) { + this->connect(); + SQLHSTMT statement; + + // Allocate a statement using alloc statement + SQLRETURN ret = SQLAllocStmt(this->conn, &statement); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // TODO Uncomment once SQLExecDirect is implemented + // SQLWCHAR sql_buffer[ODBC_BUFFER_SIZE] = L"SELECT 1"; + // ret = SQLExecDirect(statement, sql_buffer, SQL_NTS); + + // EXPECT_TRUE(ret == SQL_SUCCESS); + + // ret = SQLFreeStmt(statement, SQL_CLOSE); + + // EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free statement handle + ret = SQLFreeStmt(statement, SQL_DROP); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + this->disconnect(); +} + +TYPED_TEST(FlightSQLODBCTestBase, TestCloseConnectionWithOpenStatement) { + // Test is disabled as disconnecting without closing statement fails on Windows. + // This test case can be potentially used on macOS/Linux. + GTEST_SKIP(); + // ODBC Environment + SQLHENV env; + SQLHDBC conn; + SQLHSTMT statement; + + // Allocate an environment handle + SQLRETURN ret = SQLAllocEnv(&env); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + ret = SQLSetEnvAttr(env, SQL_ATTR_ODBC_VERSION, (void*)SQL_OV_ODBC3, 0); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a connection using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_DBC, env, &conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Connect string + std::string connect_str = this->getConnectionString(); + ASSERT_OK_AND_ASSIGN(std::wstring wconnect_str, + arrow::util::UTF8ToWideString(connect_str)); + std::vector connect_str0(wconnect_str.begin(), wconnect_str.end()); + + SQLWCHAR outstr[ODBC_BUFFER_SIZE] = L""; + SQLSMALLINT outstrlen; + + // Connecting to ODBC server. 
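+  // (Sketch of the plumbing above: the UTF-8 connection string is widened via
+  // arrow::util::UTF8ToWideString and copied into a std::vector<SQLWCHAR>
+  // (connect_str0) so it can be passed as SQLWCHAR*. SQL_DRIVER_NOPROMPT asks
+  // the driver manager to fail instead of raising a configuration dialog when
+  // the connection string is incomplete.)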
+ ret = SQLDriverConnect(conn, NULL, &connect_str0[0], + static_cast(connect_str0.size()), outstr, + ODBC_BUFFER_SIZE, &outstrlen, SQL_DRIVER_NOPROMPT); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Allocate a statement using alloc statement + ret = SQLAllocStmt(conn, &statement); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Disconnect from ODBC without closing the statement first + ret = SQLDisconnect(conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free connection handle + ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); + + EXPECT_TRUE(ret == SQL_SUCCESS); + + // Free environment handle + ret = SQLFreeHandle(SQL_HANDLE_ENV, env); + + EXPECT_TRUE(ret == SQL_SUCCESS); +} + } // namespace integration_tests } // namespace odbc } // namespace flight diff --git a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc index 656c221a1d1..c079c3c175c 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc @@ -65,11 +65,21 @@ void FlightSQLODBCRemoteTestBase::connectWithString(std::string connect_str) { // Assert connection is successful before we continue ASSERT_TRUE(ret == SQL_SUCCESS); + + // Allocate a statement using alloc handle + ret = SQLAllocHandle(SQL_HANDLE_STMT, conn, &stmt); + + ASSERT_TRUE(ret == SQL_SUCCESS); } void FlightSQLODBCRemoteTestBase::disconnect() { + // Close statement + SQLRETURN ret = SQLFreeHandle(SQL_HANDLE_STMT, stmt); + + EXPECT_TRUE(ret == SQL_SUCCESS); + // Disconnect from ODBC - SQLRETURN ret = SQLDisconnect(conn); + ret = SQLDisconnect(conn); if (ret != SQL_SUCCESS) { std::cerr << GetOdbcErrorMessage(SQL_HANDLE_DBC, conn) << std::endl; From 6da69329994fff06b8e8b0adae9e0a0c7b2ae3e9 Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" Date: Fri, 13 Jun 2025 10:39:51 -0700 Subject: [PATCH 16/63] Follow-up DBT Labs connection fix --- .../flight/sql/odbc/flight_sql/flight_sql_auth_method.cc | 9 ++++++--- .../flight/sql/odbc/flight_sql/flight_sql_connection.cc | 3 +-- 2 files changed, 7 insertions(+), 5 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc index 4b66c30dab0..6c265673837 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_auth_method.cc @@ -107,7 +107,9 @@ class UserPasswordAuthMethod : public FlightSqlAuthMethod { throw odbcabstraction::DriverException(bearer_result.status().message()); } - call_options.headers.push_back(bearer_result.ValueOrDie()); + // call_options may have already been populated with data from the connection string + // or DSN. Ensure auth-generated headers are placed at the front of the header list. + call_options.headers.insert(call_options.headers.begin(), bearer_result.ValueOrDie()); } std::string GetUser() override { return user_; } @@ -129,10 +131,11 @@ class TokenAuthMethod : public FlightSqlAuthMethod { void Authenticate(FlightSqlConnection& connection, FlightCallOptions& call_options) override { - // add the token to the headers + // add the token to the front of the headers. For consistency auth headers should be + // at the front. 
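+    // (Illustration, header names hypothetical: if PopulateCallOptions already
+    // contributed {"x-custom-header", "1"} from the connection string, the
+    // insert below produces
+    //   headers == [{"authorization", "Bearer <token>"}, {"x-custom-header", "1"}]
+    // whereas push_back would leave the authorization entry last.)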
const std::pair token_header("authorization", "Bearer " + token_); - call_options.headers.push_back(token_header); + call_options.headers.insert(call_options.headers.begin(), token_header); const arrow::Status status = client_.Authenticate( call_options, diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc index 54e712feffb..422316a9f8b 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc @@ -175,6 +175,7 @@ void FlightSqlConnection::Connect(const ConnPropertyMap& properties, std::unique_ptr flight_client; ThrowIfNotOK(FlightClient::Connect(location, client_options).Value(&flight_client)); + PopulateMetadataSettings(properties); PopulateCallOptions(properties); std::unique_ptr auth_method = @@ -190,8 +191,6 @@ void FlightSqlConnection::Connect(const ConnPropertyMap& properties, info_.SetProperty(SQL_USER_NAME, auth_method->GetUser()); attribute_[CONNECTION_DEAD] = static_cast(SQL_FALSE); - - PopulateMetadataSettings(properties); } catch (...) { attribute_[CONNECTION_DEAD] = static_cast(SQL_TRUE); sql_client_.reset(); From 2a4107130ced58ff91b378b7d584a8c086fb68e0 Mon Sep 17 00:00:00 2001 From: rscales Date: Mon, 16 Jun 2025 12:31:16 -0700 Subject: [PATCH 17/63] Implement SQLGetDiag Rec and Field for statement --- cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 55 +++++++++++++++++++---- 1 file changed, 47 insertions(+), 8 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index 9c734e072a9..21d060ad70f 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -234,6 +234,7 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, using ODBC::GetStringAttribute; using ODBC::ODBCConnection; using ODBC::ODBCEnvironment; + using ODBC::ODBCStatement; LOG_DEBUG( "SQLGetDiagFieldW called with handleType: {}, handle: {}, recNumber: {}, " @@ -277,7 +278,9 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, } case SQL_HANDLE_STMT: { - return SQL_ERROR; + ODBCStatement* statement = reinterpret_cast(handle); + diagnostics = &statement->GetDiagnostics(); + break; } default: @@ -308,12 +311,44 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, return SQL_SUCCESS; } - // TODO Implement statement header functions - case SQL_DIAG_CURSOR_ROW_COUNT: + case SQL_DIAG_CURSOR_ROW_COUNT: { + if (handleType == SQL_HANDLE_STMT) { + if (diagInfoPtr) { + // Will always be 0 if only select supported + *static_cast(diagInfoPtr) = 0; + } + + if (stringLengthPtr) { + *stringLengthPtr = sizeof(SQLLEN); + } + + return SQL_SUCCESS; + } + + return SQL_ERROR; + } + + // Not supported case SQL_DIAG_DYNAMIC_FUNCTION: - case SQL_DIAG_DYNAMIC_FUNCTION_CODE: + case SQL_DIAG_DYNAMIC_FUNCTION_CODE: { + if (handleType == SQL_HANDLE_STMT) { + return SQL_SUCCESS; + } + + return SQL_ERROR; + } + case SQL_DIAG_ROW_COUNT: { if (handleType == SQL_HANDLE_STMT) { + if (diagInfoPtr) { + // Will always be 0 if only select supported + *static_cast(diagInfoPtr) = 0; + } + + if (stringLengthPtr) { + *stringLengthPtr = sizeof(SQLLEN); + } + return SQL_SUCCESS; } @@ -372,8 +407,11 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, } case SQL_HANDLE_STMT: { - // TODO Implement for case of statement - return SQL_ERROR; + ODBCStatement* statement = reinterpret_cast(handle); + 
ODBCConnection* connection = &statement->GetConnection(); + std::string dsn = connection->GetDSN(); + return GetStringAttribute(isUnicode, dsn, true, diagInfoPtr, bufferLength, + stringLengthPtr, *diagnostics); } default: @@ -445,10 +483,10 @@ SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT r SQLWCHAR* messageText, SQLSMALLINT bufferLength, SQLSMALLINT* textLengthPtr) { using driver::odbcabstraction::Diagnostics; - using ODBC::ConvertToSqlWChar; using ODBC::GetStringAttribute; using ODBC::ODBCConnection; using ODBC::ODBCEnvironment; + using ODBC::ODBCStatement; LOG_DEBUG( "SQLGetDiagRecW called with handleType: {}, handle: {}, recNumber: {}, " @@ -488,7 +526,8 @@ SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT r } case SQL_HANDLE_STMT: { - return SQL_ERROR; + auto* statement = ODBCStatement::of(handle); + diagnostics = &statement->GetDiagnostics(); } default: From 45719f426549f12fc69166a221edefecd2f56848 Mon Sep 17 00:00:00 2001 From: "Alina (Xi) Li" <96995091+alinaliBQ@users.noreply.github.com> Date: Mon, 16 Jun 2025 16:18:45 -0700 Subject: [PATCH 18/63] Unicode support for DSN ODBC APIs * Let compiler append `W` to ODBC APIs where applicable. --- cpp/src/arrow/flight/sql/odbc/CMakeLists.txt | 2 + cpp/src/arrow/flight/sql/odbc/entry_points.cc | 155 ++++++++--------- .../odbc/flight_sql/config/configuration.cc | 48 +++-- .../odbc/flight_sql/flight_sql_connection.cc | 2 +- .../include/flight_sql/config/configuration.h | 1 + .../flight_sql/ui/add_property_window.h | 6 +- .../include/flight_sql/ui/custom_window.h | 2 +- .../flight_sql/include/flight_sql/ui/window.h | 32 ++-- .../flight/sql/odbc/flight_sql/system_dsn.cc | 25 +-- .../flight/sql/odbc/flight_sql/system_dsn.h | 4 +- .../sql/odbc/flight_sql/system_trust_store.cc | 15 +- .../sql/odbc/flight_sql/system_trust_store.h | 4 +- .../odbc/flight_sql/ui/add_property_window.cc | 18 +- .../sql/odbc/flight_sql/ui/custom_window.cc | 2 +- .../flight_sql/ui/dsn_configuration_window.cc | 164 ++++++++++-------- .../flight/sql/odbc/flight_sql/ui/window.cc | 58 +++---- .../sql/odbc/flight_sql/win_system_dsn.cc | 35 ++-- cpp/src/arrow/flight/sql/odbc/odbc.def | 2 +- cpp/src/arrow/flight/sql/odbc/odbc_api.cc | 43 +++-- cpp/src/arrow/flight/sql/odbc/odbc_api.h | 39 ++--- .../odbc_impl/odbc_connection.cc | 35 ++-- .../flight/sql/odbc/tests/CMakeLists.txt | 2 - .../flight/sql/odbc/tests/connection_test.cc | 14 +- .../flight/sql/odbc/tests/odbc_test_suite.cc | 4 +- 24 files changed, 383 insertions(+), 329 deletions(-) diff --git a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt index 449db0fedf4..2017fa512bf 100644 --- a/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/CMakeLists.txt @@ -32,6 +32,8 @@ else() set(ODBCINST odbcinst) endif() +add_definitions(-DUNICODE=1) + add_subdirectory(flight_sql) add_subdirectory(odbcabstraction) add_subdirectory(tests) diff --git a/cpp/src/arrow/flight/sql/odbc/entry_points.cc b/cpp/src/arrow/flight/sql/odbc/entry_points.cc index ef6aa2e5fd0..b397631992f 100644 --- a/cpp/src/arrow/flight/sql/odbc/entry_points.cc +++ b/cpp/src/arrow/flight/sql/odbc/entry_points.cc @@ -61,20 +61,20 @@ SQLRETURN SQL_API SQLFreeStmt(SQLHSTMT stmt, SQLUSMALLINT option) { return arrow::SQLFreeStmt(stmt, option); } -SQLRETURN SQL_API SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, - SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, - SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, - SQLSMALLINT* 
stringLengthPtr) { - return arrow::SQLGetDiagFieldW(handleType, handle, recNumber, diagIdentifier, - diagInfoPtr, bufferLength, stringLengthPtr); +SQLRETURN SQL_API SQLGetDiagField(SQLSMALLINT handleType, SQLHANDLE handle, + SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, + SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, + SQLSMALLINT* stringLengthPtr) { + return arrow::SQLGetDiagField(handleType, handle, recNumber, diagIdentifier, + diagInfoPtr, bufferLength, stringLengthPtr); } -SQLRETURN SQL_API SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, - SQLSMALLINT recNumber, SQLWCHAR* sqlState, - SQLINTEGER* nativeErrorPtr, SQLWCHAR* messageText, - SQLSMALLINT bufferLength, SQLSMALLINT* textLengthPtr) { - return arrow::SQLGetDiagRecW(handleType, handle, recNumber, sqlState, nativeErrorPtr, - messageText, bufferLength, textLengthPtr); +SQLRETURN SQL_API SQLGetDiagRec(SQLSMALLINT handleType, SQLHANDLE handle, + SQLSMALLINT recNumber, SQLWCHAR* sqlState, + SQLINTEGER* nativeErrorPtr, SQLWCHAR* messageText, + SQLSMALLINT bufferLength, SQLSMALLINT* textLengthPtr) { + return arrow::SQLGetDiagRec(handleType, handle, recNumber, sqlState, nativeErrorPtr, + messageText, bufferLength, textLengthPtr); } SQLRETURN SQL_API SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, @@ -87,36 +87,35 @@ SQLRETURN SQL_API SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePt return arrow::SQLSetEnvAttr(env, attr, valuePtr, strLen); } -SQLRETURN SQL_API SQLSetConnectAttrW(SQLHDBC conn, SQLINTEGER attr, SQLPOINTER value, - SQLINTEGER valueLen) { +SQLRETURN SQL_API SQLSetConnectAttr(SQLHDBC conn, SQLINTEGER attr, SQLPOINTER value, + SQLINTEGER valueLen) { LOG_DEBUG("SQLSetConnectAttrW called with conn: {}, attr: {}, value: {}, valueLen: {}", conn, attr, value, valueLen); return SQL_ERROR; } -SQLRETURN SQL_API SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, - SQLPOINTER infoValuePtr, SQLSMALLINT bufLen, - SQLSMALLINT* length) { - return arrow::SQLGetInfoW(conn, infoType, infoValuePtr, bufLen, length); +SQLRETURN SQL_API SQLGetInfo(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValuePtr, + SQLSMALLINT bufLen, SQLSMALLINT* length) { + return arrow::SQLGetInfo(conn, infoType, infoValuePtr, bufLen, length); } -SQLRETURN SQL_API SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, - SQLWCHAR* inConnectionString, - SQLSMALLINT inConnectionStringLen, - SQLWCHAR* outConnectionString, - SQLSMALLINT outConnectionStringBufferLen, - SQLSMALLINT* outConnectionStringLen, - SQLUSMALLINT driverCompletion) { - return arrow::SQLDriverConnectW( +SQLRETURN SQL_API SQLDriverConnect(SQLHDBC conn, SQLHWND windowHandle, + SQLWCHAR* inConnectionString, + SQLSMALLINT inConnectionStringLen, + SQLWCHAR* outConnectionString, + SQLSMALLINT outConnectionStringBufferLen, + SQLSMALLINT* outConnectionStringLen, + SQLUSMALLINT driverCompletion) { + return arrow::SQLDriverConnect( conn, windowHandle, inConnectionString, inConnectionStringLen, outConnectionString, outConnectionStringBufferLen, outConnectionStringLen, driverCompletion); } -SQLRETURN SQL_API SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, - SQLWCHAR* userName, SQLSMALLINT userNameLen, - SQLWCHAR* password, SQLSMALLINT passwordLen) { - return arrow::SQLConnectW(conn, dsnName, dsnNameLen, userName, userNameLen, password, - passwordLen); +SQLRETURN SQL_API SQLConnect(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, + SQLWCHAR* userName, SQLSMALLINT userNameLen, + SQLWCHAR* password, SQLSMALLINT passwordLen) { + return 
arrow::SQLConnect(conn, dsnName, dsnNameLen, userName, userNameLen, password, + passwordLen); } SQLRETURN SQL_API SQLDisconnect(SQLHDBC conn) { return arrow::SQLDisconnect(conn); } @@ -142,11 +141,11 @@ SQLRETURN SQL_API SQLCloseCursor(SQLHSTMT statementHandle) { return SQL_ERROR; } -SQLRETURN SQL_API SQLColAttributeW(SQLHSTMT statementHandle, SQLUSMALLINT columnNumber, - SQLUSMALLINT fieldIdentifier, - SQLPOINTER characterAttributePtr, - SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr, - SQLLEN* numericAttributePtr) { +SQLRETURN SQL_API SQLColAttribute(SQLHSTMT statementHandle, SQLUSMALLINT columnNumber, + SQLUSMALLINT fieldIdentifier, + SQLPOINTER characterAttributePtr, + SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr, + SQLLEN* numericAttributePtr) { LOG_DEBUG( "SQLColAttributeW called with statementHandle: {}, columnNumber: {}, " "fieldIdentifier: {}, characterAttributePtr: {}, bufferLength: {}, " @@ -156,11 +155,11 @@ SQLRETURN SQL_API SQLColAttributeW(SQLHSTMT statementHandle, SQLUSMALLINT column return SQL_ERROR; } -SQLRETURN SQL_API SQLColumnsW(SQLHSTMT statementHandle, SQLWCHAR* catalogName, - SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, - SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, - SQLSMALLINT tableNameLength, SQLWCHAR* columnName, - SQLSMALLINT columnNameLength) { +SQLRETURN SQL_API SQLColumns(SQLHSTMT statementHandle, SQLWCHAR* catalogName, + SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, + SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, + SQLSMALLINT tableNameLength, SQLWCHAR* columnName, + SQLSMALLINT columnNameLength) { LOG_DEBUG( "SQLColumnsW called with statementHandle: {}, catalogName: {}, catalogNameLength: " "{}, " @@ -173,10 +172,10 @@ SQLRETURN SQL_API SQLColumnsW(SQLHSTMT statementHandle, SQLWCHAR* catalogName, return SQL_ERROR; } -SQLRETURN SQL_API SQLErrorW(SQLHENV handleType, SQLHDBC handle, SQLHSTMT hstmt, - SQLWCHAR FAR* szSqlState, SQLINTEGER FAR* pfNativeError, - SQLWCHAR FAR* szErrorMsg, SQLSMALLINT cbErrorMsgMax, - SQLSMALLINT FAR* pcbErrorMsg) { +SQLRETURN SQL_API SQLError(SQLHENV handleType, SQLHDBC handle, SQLHSTMT hstmt, + SQLWCHAR FAR* szSqlState, SQLINTEGER FAR* pfNativeError, + SQLWCHAR FAR* szErrorMsg, SQLSMALLINT cbErrorMsgMax, + SQLSMALLINT FAR* pcbErrorMsg) { LOG_DEBUG( "SQLErrorW called with handleType: {}, handle: {}, hstmt: {}, szSqlState: {}, " "pfNativeError: {}, szErrorMsg: {}, cbErrorMsgMax: {}, pcbErrorMsg: {}", @@ -185,8 +184,8 @@ SQLRETURN SQL_API SQLErrorW(SQLHENV handleType, SQLHDBC handle, SQLHSTMT hstmt, return SQL_ERROR; } -SQLRETURN SQL_API SQLExecDirectW(SQLHSTMT statementHandle, SQLWCHAR* statementText, - SQLINTEGER textLength) { +SQLRETURN SQL_API SQLExecDirect(SQLHSTMT statementHandle, SQLWCHAR* statementText, + SQLINTEGER textLength) { LOG_DEBUG( "SQLExecDirectW called with statementHandle: {}, statementText: {}, textLength: {}", statementHandle, fmt::ptr(statementText), textLength); @@ -203,13 +202,13 @@ SQLRETURN SQL_API SQLFetch(SQLHSTMT statementHandle) { return SQL_ERROR; } -SQLRETURN SQL_API SQLForeignKeysW(SQLHSTMT statementHandle, SQLWCHAR* pKCatalogName, - SQLSMALLINT pKCatalogNameLength, SQLWCHAR* pKSchemaName, - SQLSMALLINT pKSchemaNameLength, SQLWCHAR* pKTableName, - SQLSMALLINT pKTableNameLength, SQLWCHAR* fKCatalogName, - SQLSMALLINT fKCatalogNameLength, SQLWCHAR* fKSchemaName, - SQLSMALLINT fKSchemaNameLength, SQLWCHAR* fKTableName, - SQLSMALLINT fKTableNameLength) { +SQLRETURN SQL_API SQLForeignKeys(SQLHSTMT statementHandle, SQLWCHAR* pKCatalogName, + SQLSMALLINT 
pKCatalogNameLength, SQLWCHAR* pKSchemaName, + SQLSMALLINT pKSchemaNameLength, SQLWCHAR* pKTableName, + SQLSMALLINT pKTableNameLength, SQLWCHAR* fKCatalogName, + SQLSMALLINT fKCatalogNameLength, SQLWCHAR* fKSchemaName, + SQLSMALLINT fKSchemaNameLength, SQLWCHAR* fKTableName, + SQLSMALLINT fKTableNameLength) { LOG_DEBUG( "SQLForeignKeysW called with statementHandle: {}, pKCatalogName: {}, " "pKCatalogNameLength: " @@ -226,9 +225,9 @@ SQLRETURN SQL_API SQLForeignKeysW(SQLHSTMT statementHandle, SQLWCHAR* pKCatalogN return SQL_ERROR; } -SQLRETURN SQL_API SQLGetConnectAttrW(SQLHDBC connectionHandle, SQLINTEGER attribute, - SQLPOINTER valuePtr, SQLINTEGER bufferLength, - SQLINTEGER* stringLengthPtr) { +SQLRETURN SQL_API SQLGetConnectAttr(SQLHDBC connectionHandle, SQLINTEGER attribute, + SQLPOINTER valuePtr, SQLINTEGER bufferLength, + SQLINTEGER* stringLengthPtr) { LOG_DEBUG( "SQLGetConnectAttrW called with connectionHandle: {}, attribute: {}, valuePtr: {}, " "bufferLength: {}, stringLengthPtr: {}", @@ -247,9 +246,9 @@ SQLRETURN SQL_API SQLGetData(SQLHSTMT statementHandle, SQLUSMALLINT col_or_Param return SQL_ERROR; } -SQLRETURN SQL_API SQLGetStmtAttrW(SQLHSTMT statementHandle, SQLINTEGER attribute, - SQLPOINTER valuePtr, SQLINTEGER bufferLength, - SQLINTEGER* stringLengthPtr) { +SQLRETURN SQL_API SQLGetStmtAttr(SQLHSTMT statementHandle, SQLINTEGER attribute, + SQLPOINTER valuePtr, SQLINTEGER bufferLength, + SQLINTEGER* stringLengthPtr) { LOG_DEBUG( "SQLGetStmtAttrW called with statementHandle: {}, attribute: {}, valuePtr: {}, " "bufferLength: {}, stringLengthPtr: {}", @@ -257,7 +256,7 @@ SQLRETURN SQL_API SQLGetStmtAttrW(SQLHSTMT statementHandle, SQLINTEGER attribute return SQL_ERROR; } -SQLRETURN SQL_API SQLGetTypeInfoW(SQLHSTMT statementHandle, SQLSMALLINT dataType) { +SQLRETURN SQL_API SQLGetTypeInfo(SQLHSTMT statementHandle, SQLSMALLINT dataType) { LOG_DEBUG("SQLGetTypeInfoW called with statementHandle: {} dataType: {}", statementHandle, dataType); return SQL_ERROR; @@ -268,10 +267,10 @@ SQLRETURN SQL_API SQLMoreResults(SQLHSTMT statementHandle) { return SQL_ERROR; } -SQLRETURN SQL_API SQLNativeSqlW(SQLHDBC connectionHandle, SQLWCHAR* inStatementText, - SQLINTEGER inStatementTextLength, - SQLWCHAR* outStatementText, SQLINTEGER bufferLength, - SQLINTEGER* outStatementTextLength) { +SQLRETURN SQL_API SQLNativeSql(SQLHDBC connectionHandle, SQLWCHAR* inStatementText, + SQLINTEGER inStatementTextLength, + SQLWCHAR* outStatementText, SQLINTEGER bufferLength, + SQLINTEGER* outStatementTextLength) { LOG_DEBUG( "SQLNativeSqlW called with connectionHandle: {}, inStatementText: {}, " "inStatementTextLength: " @@ -288,18 +287,18 @@ SQLRETURN SQL_API SQLNumResultCols(SQLHSTMT statementHandle, return SQL_ERROR; } -SQLRETURN SQL_API SQLPrepareW(SQLHSTMT statementHandle, SQLWCHAR* statementText, - SQLINTEGER textLength) { +SQLRETURN SQL_API SQLPrepare(SQLHSTMT statementHandle, SQLWCHAR* statementText, + SQLINTEGER textLength) { LOG_DEBUG( "SQLPrepareW called with statementHandle: {}, statementText: {}, textLength: {}", statementHandle, fmt::ptr(statementText), textLength); return SQL_ERROR; } -SQLRETURN SQL_API SQLPrimaryKeysW(SQLHSTMT statementHandle, SQLWCHAR* catalogName, - SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, - SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, - SQLSMALLINT tableNameLength) { +SQLRETURN SQL_API SQLPrimaryKeys(SQLHSTMT statementHandle, SQLWCHAR* catalogName, + SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, + SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, + 
SQLSMALLINT tableNameLength) { LOG_DEBUG( "SQLPrimaryKeysW called with statementHandle: {}, catalogName: {}, " "catalogNameLength: " @@ -309,8 +308,8 @@ SQLRETURN SQL_API SQLPrimaryKeysW(SQLHSTMT statementHandle, SQLWCHAR* catalogNam return SQL_ERROR; } -SQLRETURN SQL_API SQLSetStmtAttrW(SQLHSTMT statementHandle, SQLINTEGER attribute, - SQLPOINTER valuePtr, SQLINTEGER stringLength) { +SQLRETURN SQL_API SQLSetStmtAttr(SQLHSTMT statementHandle, SQLINTEGER attribute, + SQLPOINTER valuePtr, SQLINTEGER stringLength) { LOG_DEBUG( "SQLSetStmtAttrW called with statementHandle: {}, attribute: {}, valuePtr: {}, " "stringLength: {}", @@ -318,11 +317,11 @@ SQLRETURN SQL_API SQLSetStmtAttrW(SQLHSTMT statementHandle, SQLINTEGER attribute return SQL_ERROR; } -SQLRETURN SQL_API SQLTablesW(SQLHSTMT statementHandle, SQLWCHAR* catalogName, - SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, - SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, - SQLSMALLINT tableNameLength, SQLWCHAR* tableType, - SQLSMALLINT tableTypeLength) { +SQLRETURN SQL_API SQLTables(SQLHSTMT statementHandle, SQLWCHAR* catalogName, + SQLSMALLINT catalogNameLength, SQLWCHAR* schemaName, + SQLSMALLINT schemaNameLength, SQLWCHAR* tableName, + SQLSMALLINT tableNameLength, SQLWCHAR* tableType, + SQLSMALLINT tableTypeLength) { LOG_DEBUG( "SQLTablesW called with statementHandle: {}, catalogName: {}, catalogNameLength: " "{}, " diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc index 7cebebe56eb..db18239c47a 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/config/configuration.cc @@ -17,6 +17,8 @@ #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" #include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" +#include "arrow/result.h" +#include "arrow/util/utf8.h" #include #include @@ -27,7 +29,6 @@ namespace driver { namespace flight_sql { namespace config { - static const char DEFAULT_DSN[] = "Apache Arrow Flight SQL"; static const char DEFAULT_ENABLE_ENCRYPTION[] = TRUE_STR; static const char DEFAULT_USE_CERT_STORE[] = TRUE_STR; @@ -36,19 +37,27 @@ static const char DEFAULT_DISABLE_CERT_VERIFICATION[] = FALSE_STR; namespace { std::string ReadDsnString(const std::string& dsn, const std::string_view& key, const std::string& dflt = "") { + std::wstring wDsn = arrow::util::UTF8ToWideString(dsn).ValueOr(L""); + std::wstring wKey = arrow::util::UTF8ToWideString(key).ValueOr(L""); + std::wstring wDflt = arrow::util::UTF8ToWideString(dflt).ValueOr(L""); + #define BUFFER_SIZE (1024) - std::vector buf(BUFFER_SIZE); - int ret = SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), - static_cast(buf.size()), "ODBC.INI"); + std::vector buf(BUFFER_SIZE); + int ret = + SQLGetPrivateProfileString(wDsn.c_str(), wKey.c_str(), wDflt.c_str(), buf.data(), + static_cast(buf.size()), L"ODBC.INI"); if (ret > BUFFER_SIZE) { // If there wasn't enough space, try again with the right size buffer. 
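   // (Two-pass sketch with an illustrative 8-character buffer; the W form of
   // the installer API is selected because this build defines UNICODE:
   //   std::vector<wchar_t> buf(8);
   //   int ret = SQLGetPrivateProfileString(wDsn.c_str(), wKey.c_str(), wDflt.c_str(),
   //                                        buf.data(), static_cast<int>(buf.size()),
   //                                        L"ODBC.INI");
   //   if (ret > 8) {          // reported length exceeded the buffer
   //     buf.resize(ret + 1);  // regrow, then issue the same call again
   //   }
   // )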
buf.resize(ret + 1); - ret = SQLGetPrivateProfileString(dsn.c_str(), key.data(), dflt.c_str(), buf.data(), - static_cast(buf.size()), "ODBC.INI"); + ret = + SQLGetPrivateProfileString(wDsn.c_str(), wKey.c_str(), wDflt.c_str(), buf.data(), + static_cast(buf.size()), L"ODBC.INI"); } - return std::string(buf.data(), ret); + std::wstring wResult = std::wstring(buf.data(), ret); + std::string result = arrow::util::WideStringToUTF8(wResult).ValueOr(""); + return result; } void RemoveAllKnownKeys(std::vector& keys) { @@ -65,28 +74,32 @@ void RemoveAllKnownKeys(std::vector& keys) { } std::vector ReadAllKeys(const std::string& dsn) { - std::vector buf(BUFFER_SIZE); + std::wstring wDsn = arrow::util::UTF8ToWideString(dsn).ValueOr(L""); + + std::vector buf(BUFFER_SIZE); - int ret = SQLGetPrivateProfileString(dsn.c_str(), NULL, "", buf.data(), - static_cast(buf.size()), "ODBC.INI"); + int ret = SQLGetPrivateProfileString(wDsn.c_str(), NULL, L"", buf.data(), + static_cast(buf.size()), L"ODBC.INI"); if (ret > BUFFER_SIZE) { // If there wasn't enough space, try again with the right size buffer. buf.resize(ret + 1); - ret = SQLGetPrivateProfileString(dsn.c_str(), NULL, "", buf.data(), - static_cast(buf.size()), "ODBC.INI"); + ret = SQLGetPrivateProfileString(wDsn.c_str(), NULL, L"", buf.data(), + static_cast(buf.size()), L"ODBC.INI"); } // When you pass NULL to SQLGetPrivateProfileString it gives back a \0 delimited list of // all the keys. The below loop simply tokenizes all the keys and places them into a // vector. std::vector keys; - char* begin = buf.data(); + wchar_t* begin = buf.data(); while (begin && *begin != '\0') { - char* cur; + wchar_t* cur; for (cur = begin; *cur != '\0'; ++cur) { } - keys.emplace_back(begin, cur); + + std::string key = arrow::util::WideStringToUTF8(std::wstring(begin, cur)).ValueOr(""); + keys.emplace_back(key); begin = ++cur; } return keys; @@ -150,6 +163,11 @@ const std::string& Configuration::Get(const std::string_view& key) const { return itr->second; } +void Configuration::Set(const std::string_view& key, const std::wstring& wValue) { + std::string value = arrow::util::WideStringToUTF8(wValue).ValueOr(""); + Set(key, value); +} + void Configuration::Set(const std::string_view& key, const std::string& value) { const std::string copy = boost::trim_copy(value); if (!copy.empty()) { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc index 422316a9f8b..6d5d95865ba 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_connection.cc @@ -83,7 +83,7 @@ namespace { #if _WIN32 || _WIN64 constexpr auto SYSTEM_TRUST_STORE_DEFAULT = true; -constexpr auto STORES = {"CA", "MY", "ROOT", "SPC"}; +constexpr auto STORES = {L"CA", L"MY", L"ROOT", L"SPC"}; inline std::string GetCerts() { std::string certs; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h index 8c4d6865505..c94cc5b7832 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h @@ -52,6 +52,7 @@ class Configuration { void Clear(); bool IsSet(const std::string_view& key) const; const std::string& Get(const std::string_view& key) const; + void Set(const std::string_view& key, const std::wstring& 
wValue); void Set(const std::string_view& key, const std::string& value); void Emplace(const std::string_view& key, std::string&& value); /** diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/add_property_window.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/add_property_window.h index 01d93829a46..b7a8016447c 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/add_property_window.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/add_property_window.h @@ -70,7 +70,7 @@ class AddPropertyWindow : public CustomWindow { * * @return true if the dialog was OK'd, false otherwise. */ - bool GetProperty(std::string& key, std::string& value); + bool GetProperty(std::wstring& key, std::wstring& value); private: /** @@ -97,9 +97,9 @@ class AddPropertyWindow : public CustomWindow { std::unique_ptr valueEdit; - std::string key; + std::wstring key; - std::string value; + std::wstring value; /** Window width. */ int width; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/custom_window.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/custom_window.h index 0fc3737ed8b..649f0ef6547 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/custom_window.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/custom_window.h @@ -65,7 +65,7 @@ class CustomWindow : public Window { * @param className Window class name. * @param title Window title. */ - CustomWindow(Window* parent, const char* className, const char* title); + CustomWindow(Window* parent, const wchar_t* className, const wchar_t* title); /** * Destructor. diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/window.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/window.h index e56ad88dec6..596ff47c577 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/window.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/window.h @@ -44,7 +44,7 @@ class Window { * @param className Window class name. * @param title Window title. */ - Window(Window* parent, const char* className, const char* title); + Window(Window* parent, const wchar_t* className, const wchar_t* title); /** * Constructor for the existing window. @@ -102,7 +102,7 @@ class Window { * @return Auto pointer containing new window. */ std::unique_ptr CreateGroupBox(int posX, int posY, int sizeX, int sizeY, - const char* title, int id); + const wchar_t* title, int id); /** * Create child label window. @@ -116,7 +116,7 @@ class Window { * @return Auto pointer containing new window. */ std::unique_ptr CreateLabel(int posX, int posY, int sizeX, int sizeY, - const char* title, int id); + const wchar_t* title, int id); /** * Create child Edit window. @@ -131,7 +131,7 @@ class Window { * @return Auto pointer containing new window. */ std::unique_ptr CreateEdit(int posX, int posY, int sizeX, int sizeY, - const char* title, int id, int style = 0); + const wchar_t* title, int id, int style = 0); /** * Create child button window. @@ -146,7 +146,7 @@ class Window { * @return Auto pointer containing new window. */ std::unique_ptr CreateButton(int posX, int posY, int sizeX, int sizeY, - const char* title, int id, int style = 0); + const wchar_t* title, int id, int style = 0); /** * Create child CheckBox window. @@ -161,7 +161,7 @@ class Window { * @return Auto pointer containing new window. 
*/ std::unique_ptr CreateCheckBox(int posX, int posY, int sizeX, int sizeY, - const char* title, int id, bool state); + const wchar_t* title, int id, bool state); /** * Create child ComboBox window. @@ -175,7 +175,7 @@ class Window { * @return Auto pointer containing new window. */ std::unique_ptr CreateComboBox(int posX, int posY, int sizeX, int sizeY, - const char* title, int id); + const wchar_t* title, int id); /** * Show window. @@ -201,15 +201,15 @@ class Window { void SetVisible(bool isVisible); - void ListAddColumn(const std::string& name, int index, int width); + void ListAddColumn(const std::wstring& name, int index, int width); - void ListAddItem(const std::vector& items); + void ListAddItem(const std::vector& items); void ListDeleteSelectedItem(); - std::vector > ListGetAll(); + std::vector > ListGetAll(); - void AddTab(const std::string& name, int index); + void AddTab(const std::wstring& name, int index); bool IsTextEmpty() const; @@ -218,14 +218,14 @@ class Window { * * @param text Text. */ - void GetText(std::string& text) const; + void GetText(std::wstring& text) const; /** * Set window text. * * @param text Text. */ - void SetText(const std::string& text) const; + void SetText(const std::wstring& text) const; /** * Get CheckBox state. @@ -246,7 +246,7 @@ class Window { * * @param str String. */ - void AddString(const std::string& str); + void AddString(const std::wstring& str); /** * Set current ComboBox selection. @@ -285,10 +285,10 @@ class Window { void SetHandle(HWND value) { handle = value; } /** Window class name. */ - std::string className; + std::wstring className; /** Window title. */ - std::string title; + std::wstring title; /** Window handle. */ HWND handle; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc index 95b47bdb1e2..f0006b36c9a 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.cc @@ -19,6 +19,8 @@ #include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" +#include "arrow/result.h" +#include "arrow/util/utf8.h" #include #include @@ -29,14 +31,14 @@ using driver::flight_sql::config::Configuration; void PostLastInstallerError() { #define BUFFER_SIZE (1024) DWORD code; - char msg[BUFFER_SIZE]; + wchar_t msg[BUFFER_SIZE]; SQLInstallerError(1, &code, msg, BUFFER_SIZE, NULL); - std::stringstream buf; - buf << "Message: \"" << msg << "\", Code: " << code; - std::string errorMsg = buf.str(); + std::wstringstream buf; + buf << L"Message: \"" << msg << L"\", Code: " << code; + std::wstring errorMsg = buf.str(); - MessageBox(NULL, errorMsg.c_str(), "Error!", MB_ICONEXCLAMATION | MB_OK); + MessageBox(NULL, errorMsg.c_str(), L"Error!", MB_ICONEXCLAMATION | MB_OK); SQLPostInstallerError(code, errorMsg.c_str()); } @@ -46,7 +48,7 @@ void PostLastInstallerError() { * @param dsn DSN name. * @return True on success and false on fail. */ -bool UnregisterDsn(const std::string& dsn) { +bool UnregisterDsn(const std::wstring& dsn) { if (SQLRemoveDSNFromIni(dsn.c_str())) { return true; } @@ -62,10 +64,11 @@ bool UnregisterDsn(const std::string& dsn) { * @param driver Driver. * @return True on success and false on fail. 
*/ -bool RegisterDsn(const Configuration& config, LPCSTR driver) { +bool RegisterDsn(const Configuration& config, LPCWSTR driver) { const std::string& dsn = config.Get(FlightSqlConnection::DSN); + std::wstring wDsn = arrow::util::UTF8ToWideString(dsn).ValueOr(L""); - if (!SQLWriteDSNToIni(dsn.c_str(), driver)) { + if (!SQLWriteDSNToIni(wDsn.c_str(), driver)) { PostLastInstallerError(); return false; } @@ -78,8 +81,10 @@ bool RegisterDsn(const Configuration& config, LPCSTR driver) { continue; } - if (!SQLWritePrivateProfileString(dsn.c_str(), key.data(), it->second.c_str(), - "ODBC.INI")) { + std::wstring wKey = arrow::util::UTF8ToWideString(key).ValueOr(L""); + std::wstring wValue = arrow::util::UTF8ToWideString(it->second).ValueOr(L""); + if (!SQLWritePrivateProfileString(wDsn.c_str(), wKey.c_str(), wValue.c_str(), + L"ODBC.INI")) { PostLastInstallerError(); return false; } diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h index f3744d3428a..1ac9b4d9b80 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_dsn.h @@ -54,7 +54,7 @@ bool DisplayConnectionWindow(void* windowParent, Configuration& config, * @param driver Driver. * @return True on success and false on fail. */ -bool RegisterDsn(const Configuration& config, LPCSTR driver); +bool RegisterDsn(const Configuration& config, LPCWSTR driver); /** * Unregister specified DSN. @@ -62,4 +62,4 @@ bool RegisterDsn(const Configuration& config, LPCSTR driver); * @param dsn DSN name. * @return True on success and false on fail. */ -bool UnregisterDsn(const std::string& dsn); +bool UnregisterDsn(const std::wstring& dsn); diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.cc index 67db1fc35be..ebc8fd90adf 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.cc @@ -15,6 +15,9 @@ // specific language governing permissions and limitations // under the License. 
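+// (Sketch of the conversion flow added below, for orientation: with UNICODE
+// defined, CryptBinaryToString resolves to its W variant, so the PEM text
+// arrives as a std::wstring and is narrowed before being handed to Arrow:
+//
+//   std::wstring wCert;  // filled by CryptBinaryToString
+//   std::string cert = arrow::util::WideStringToUTF8(wCert).ValueOr("");
+//
+// PEM/base64 payloads are plain ASCII, so this narrowing is lossless.)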
+#include "arrow/result.h" +#include "arrow/util/utf8.h" + #include "arrow/flight/sql/odbc/flight_sql/system_trust_store.h" #if defined _WIN32 || defined _WIN64 @@ -32,18 +35,20 @@ std::string SystemTrustStore::GetNext() const { CryptBinaryToString(p_context_->pbCertEncoded, p_context_->cbCertEncoded, CRYPT_STRING_BASE64HEADER, nullptr, &size); - std::string cert; - cert.resize(size); + std::wstring wCert; + wCert.resize(size); CryptBinaryToString(p_context_->pbCertEncoded, p_context_->cbCertEncoded, - CRYPT_STRING_BASE64HEADER, &cert[0], &size); - cert.resize(size); + CRYPT_STRING_BASE64HEADER, &wCert[0], &size); + wCert.resize(size); + + std::string cert = arrow::util::WideStringToUTF8(wCert).ValueOr(""); return cert; } bool SystemTrustStore::SystemHasStore() { return h_store_ != nullptr; } -SystemTrustStore::SystemTrustStore(const char* store) +SystemTrustStore::SystemTrustStore(const wchar_t* store) : stores_(store), h_store_(CertOpenSystemStore(NULL, store)), p_context_(nullptr) {} SystemTrustStore::~SystemTrustStore() { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h index f8e02fea526..0ff3adc2f48 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/system_trust_store.h @@ -41,12 +41,12 @@ namespace flight_sql { /// https://github.com/apache/drill/blob/master/contrib/native/client/src/clientlib/wincert.ipp. class SystemTrustStore { private: - const char* stores_; + const wchar_t* stores_; HCERTSTORE h_store_; PCCERT_CONTEXT p_context_; public: - explicit SystemTrustStore(const char* store); + explicit SystemTrustStore(const wchar_t* store); ~SystemTrustStore(); diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc index 75aa491f781..15799c1f9a2 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/add_property_window.cc @@ -33,7 +33,7 @@ namespace flight_sql { namespace config { AddPropertyWindow::AddPropertyWindow(Window* parent) - : CustomWindow(parent, "AddProperty", "Add Property"), + : CustomWindow(parent, L"AddProperty", L"Add Property"), width(300), height(120), accepted(false), @@ -69,7 +69,7 @@ void AddPropertyWindow::Create() { } } -bool AddPropertyWindow::GetProperty(std::string& key, std::string& value) { +bool AddPropertyWindow::GetProperty(std::wstring& key, std::wstring& value) { if (accepted) { key = this->key; value = this->value; @@ -87,10 +87,10 @@ void AddPropertyWindow::OnCreate() { int cancelPosX = width - MARGIN - BUTTON_WIDTH; int okPosX = cancelPosX - INTERVAL - BUTTON_WIDTH; - okButton = CreateButton(okPosX, groupPosY, BUTTON_WIDTH, BUTTON_HEIGHT, "Ok", + okButton = CreateButton(okPosX, groupPosY, BUTTON_WIDTH, BUTTON_HEIGHT, L"Ok", ChildId::OK_BUTTON, BS_DEFPUSHBUTTON); cancelButton = CreateButton(cancelPosX, groupPosY, BUTTON_WIDTH, BUTTON_HEIGHT, - "Cancel", ChildId::CANCEL_BUTTON); + L"Cancel", ChildId::CANCEL_BUTTON); isInitialized = true; CheckEnableOk(); } @@ -104,15 +104,15 @@ int AddPropertyWindow::CreateEdits(int posX, int posY, int sizeX) { int rowPos = posY; labels.push_back( - CreateLabel(posX, rowPos, LABEL_WIDTH, ROW_HEIGHT, "Key:", ChildId::KEY_LABEL)); - keyEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, "", ChildId::KEY_EDIT); + CreateLabel(posX, rowPos, LABEL_WIDTH, ROW_HEIGHT, L"Key:", 
ChildId::KEY_LABEL)); + keyEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, L"", ChildId::KEY_EDIT); rowPos += INTERVAL + ROW_HEIGHT; - labels.push_back( - CreateLabel(posX, rowPos, LABEL_WIDTH, ROW_HEIGHT, "Value:", ChildId::VALUE_LABEL)); + labels.push_back(CreateLabel(posX, rowPos, LABEL_WIDTH, ROW_HEIGHT, L"Value:", + ChildId::VALUE_LABEL)); valueEdit = - CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, "", ChildId::VALUE_EDIT); + CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, L"", ChildId::VALUE_EDIT); rowPos += INTERVAL + ROW_HEIGHT; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc index 8f660a21329..bde7967c7e9 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/custom_window.cc @@ -84,7 +84,7 @@ LRESULT CALLBACK CustomWindow::WndProc(HWND hwnd, UINT msg, WPARAM wParam, return DefWindowProc(hwnd, msg, wParam, lParam); } -CustomWindow::CustomWindow(Window* parent, const char* className, const char* title) +CustomWindow::CustomWindow(Window* parent, const wchar_t* className, const wchar_t* title) : Window(parent, className, title) { WNDCLASS wcx; diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc index e469b6b067f..58bf9d37b48 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/dsn_configuration_window.cc @@ -15,8 +15,11 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/dsn_configuration_window.h" +#include "arrow/result.h" +#include "arrow/util/utf8.h" + #include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" +#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/ui/dsn_configuration_window.h" #include #include @@ -55,7 +58,7 @@ namespace config { DsnConfigurationWindow::DsnConfigurationWindow(Window* parent, config::Configuration& config) - : CustomWindow(parent, "FlightConfigureDSN", "Configure Apache Arrow Flight SQL"), + : CustomWindow(parent, L"FlightConfigureDSN", L"Configure Apache Arrow Flight SQL"), width(480), height(375), config(config), @@ -91,11 +94,11 @@ void DsnConfigurationWindow::Create() { throw odbcabstraction::DriverException(buf.str()); } } - +//-AL- fix this file next void DsnConfigurationWindow::OnCreate() { tabControl = CreateTabControl(ChildId::TAB_CONTROL); - tabControl->AddTab("Common", COMMON_TAB); - tabControl->AddTab("Advanced", ADVANCED_TAB); + tabControl->AddTab(L"Common", COMMON_TAB); + tabControl->AddTab(L"Advanced", ADVANCED_TAB); int groupPosY = 3 * MARGIN; int groupSizeY = width - 2 * MARGIN; @@ -118,11 +121,11 @@ void DsnConfigurationWindow::OnCreate() { int buttonPosY = std::max(commonGroupPosY, advancedGroupPosY); testButton = CreateButton(testPosX, buttonPosY, BUTTON_WIDTH + 20, BUTTON_HEIGHT, - "Test Connection", ChildId::TEST_CONNECTION_BUTTON); - okButton = CreateButton(okPosX, buttonPosY, BUTTON_WIDTH, BUTTON_HEIGHT, "Ok", + L"Test Connection", ChildId::TEST_CONNECTION_BUTTON); + okButton = CreateButton(okPosX, buttonPosY, BUTTON_WIDTH, BUTTON_HEIGHT, L"Ok", ChildId::OK_BUTTON); cancelButton = CreateButton(cancelPosX, buttonPosY, BUTTON_WIDTH, BUTTON_HEIGHT, - "Cancel", ChildId::CANCEL_BUTTON); + L"Cancel", ChildId::CANCEL_BUTTON); isInitialized = true; 
CheckEnableOk(); SelectTab(COMMON_TAB); @@ -138,31 +141,35 @@ int DsnConfigurationWindow::CreateConnectionSettingsGroup(int posX, int posY, in int rowPos = posY + 2 * INTERVAL; - const char* val = config.Get(FlightSqlConnection::DSN).c_str(); + std::string val = config.Get(FlightSqlConnection::DSN); + std::wstring wVal = arrow::util::UTF8ToWideString(val).ValueOr(L""); labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Data Source Name:", ChildId::NAME_LABEL)); - nameEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, val, ChildId::NAME_EDIT); + L"Data Source Name:", ChildId::NAME_LABEL)); + nameEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, wVal.c_str(), + ChildId::NAME_EDIT); rowPos += INTERVAL + ROW_HEIGHT; - val = config.Get(FlightSqlConnection::HOST).c_str(); - labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Host Name:", ChildId::SERVER_LABEL)); - serverEdit = - CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, val, ChildId::SERVER_EDIT); + val = config.Get(FlightSqlConnection::HOST); + wVal = arrow::util::UTF8ToWideString(val).ValueOr(L""); + labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, L"Host Name:", + ChildId::SERVER_LABEL)); + serverEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, wVal.c_str(), + ChildId::SERVER_EDIT); rowPos += INTERVAL + ROW_HEIGHT; - val = config.Get(FlightSqlConnection::PORT).c_str(); - labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Port:", ChildId::PORT_LABEL)); - portEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, val, ChildId::PORT_EDIT, - ES_NUMBER); + val = config.Get(FlightSqlConnection::PORT); + wVal = arrow::util::UTF8ToWideString(val).ValueOr(L""); + labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, L"Port:", + ChildId::PORT_LABEL)); + portEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, wVal.c_str(), + ChildId::PORT_EDIT, ES_NUMBER); rowPos += INTERVAL + ROW_HEIGHT; connectionSettingsGroupBox = - CreateGroupBox(posX, posY, sizeX, rowPos - posY, "Connection settings", + CreateGroupBox(posX, posY, sizeX, rowPos - posY, L"Connection settings", ChildId::CONNECTION_SETTINGS_GROUP_BOX); return rowPos - posY; @@ -179,36 +186,39 @@ int DsnConfigurationWindow::CreateAuthSettingsGroup(int posX, int posY, int size int rowPos = posY + 2 * INTERVAL; labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Authentication Type:", ChildId::AUTH_TYPE_LABEL)); + L"Authentication Type:", ChildId::AUTH_TYPE_LABEL)); authTypeComboBox = CreateComboBox(editPosX, rowPos, editSizeX, ROW_HEIGHT, - "Authentication Type:", ChildId::AUTH_TYPE_COMBOBOX); - authTypeComboBox->AddString("Basic Authentication"); - authTypeComboBox->AddString("Token Authentication"); + L"Authentication Type:", ChildId::AUTH_TYPE_COMBOBOX); + authTypeComboBox->AddString(L"Basic Authentication"); + authTypeComboBox->AddString(L"Token Authentication"); rowPos += INTERVAL + ROW_HEIGHT; - const char* val = config.Get(FlightSqlConnection::UID).c_str(); + std::string val = config.Get(FlightSqlConnection::UID); + std::wstring wVal = arrow::util::UTF8ToWideString(val).ValueOr(L""); - labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "User:", ChildId::USER_LABEL)); - userEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, val, ChildId::USER_EDIT); + labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, L"User:", + ChildId::USER_LABEL)); + userEdit = 
CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, wVal.c_str(), + ChildId::USER_EDIT); rowPos += INTERVAL + ROW_HEIGHT; - val = config.Get(FlightSqlConnection::PWD).c_str(); - labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Password:", ChildId::PASSWORD_LABEL)); - passwordEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, val, + val = config.Get(FlightSqlConnection::PWD); + wVal = arrow::util::UTF8ToWideString(val).ValueOr(L""); + labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, L"Password:", + ChildId::PASSWORD_LABEL)); + passwordEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, wVal.c_str(), ChildId::USER_EDIT, ES_PASSWORD); rowPos += INTERVAL + ROW_HEIGHT; const auto& token = config.Get(FlightSqlConnection::TOKEN); - val = token.c_str(); + wVal = arrow::util::UTF8ToWideString(token).ValueOr(L""); labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Authentication Token:", ChildId::AUTH_TOKEN_LABEL)); - authTokenEdit = - CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, val, ChildId::AUTH_TOKEN_EDIT); + L"Authentication Token:", ChildId::AUTH_TOKEN_LABEL)); + authTokenEdit = CreateEdit(editPosX, rowPos, editSizeX, ROW_HEIGHT, wVal.c_str(), + ChildId::AUTH_TOKEN_EDIT); authTokenEdit->SetEnabled(false); // Ensure the right elements are selected. @@ -218,7 +228,7 @@ int DsnConfigurationWindow::CreateAuthSettingsGroup(int posX, int posY, int size rowPos += INTERVAL + ROW_HEIGHT; authSettingsGroupBox = - CreateGroupBox(posX, posY, sizeX, rowPos - posY, "Authentication settings", + CreateGroupBox(posX, posY, sizeX, rowPos - posY, L"Authentication settings", ChildId::AUTH_SETTINGS_GROUP_BOX); return rowPos - posY; @@ -234,37 +244,38 @@ int DsnConfigurationWindow::CreateEncryptionSettingsGroup(int posX, int posY, in int rowPos = posY + 2 * INTERVAL; - const char* val = config.Get(FlightSqlConnection::USE_ENCRYPTION).c_str(); + std::string val = config.Get(FlightSqlConnection::USE_ENCRYPTION); const bool enableEncryption = driver::odbcabstraction::AsBool(val).value_or(true); labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Use Encryption:", ChildId::ENABLE_ENCRYPTION_LABEL)); + L"Use Encryption:", ChildId::ENABLE_ENCRYPTION_LABEL)); enableEncryptionCheckBox = - CreateCheckBox(editPosX, rowPos - 2, editSizeX, ROW_HEIGHT, "", + CreateCheckBox(editPosX, rowPos - 2, editSizeX, ROW_HEIGHT, L"", ChildId::ENABLE_ENCRYPTION_CHECKBOX, enableEncryption); rowPos += INTERVAL + ROW_HEIGHT; - val = config.Get(FlightSqlConnection::TRUSTED_CERTS).c_str(); + val = config.Get(FlightSqlConnection::TRUSTED_CERTS); + std::wstring wVal = arrow::util::UTF8ToWideString(val).ValueOr(L""); labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, ROW_HEIGHT, - "Certificate:", ChildId::CERTIFICATE_LABEL)); + L"Certificate:", ChildId::CERTIFICATE_LABEL)); certificateEdit = CreateEdit(editPosX, rowPos, editSizeX - MARGIN - BUTTON_WIDTH, - ROW_HEIGHT, val, ChildId::CERTIFICATE_EDIT); + ROW_HEIGHT, wVal.c_str(), ChildId::CERTIFICATE_EDIT); certificateBrowseButton = CreateButton(editPosX + editSizeX - BUTTON_WIDTH, rowPos - 2, BUTTON_WIDTH, - BUTTON_HEIGHT, "Browse", ChildId::CERTIFICATE_BROWSE_BUTTON); + BUTTON_HEIGHT, L"Browse", ChildId::CERTIFICATE_BROWSE_BUTTON); rowPos += INTERVAL + ROW_HEIGHT; val = config.Get(FlightSqlConnection::USE_SYSTEM_TRUST_STORE).c_str(); const bool useSystemCertStore = driver::odbcabstraction::AsBool(val).value_or(true); - labels.push_back( - CreateLabel(labelPosX, rowPos, 
LABEL_WIDTH, 2 * ROW_HEIGHT, - "Use System Certificate Store:", ChildId::USE_SYSTEM_CERT_STORE_LABEL)); + labels.push_back(CreateLabel(labelPosX, rowPos, LABEL_WIDTH, 2 * ROW_HEIGHT, + L"Use System Certificate Store:", + ChildId::USE_SYSTEM_CERT_STORE_LABEL)); useSystemCertStoreCheckBox = - CreateCheckBox(editPosX, rowPos - 2, 20, 2 * ROW_HEIGHT, "", + CreateCheckBox(editPosX, rowPos - 2, 20, 2 * ROW_HEIGHT, L"", ChildId::USE_SYSTEM_CERT_STORE_CHECKBOX, useSystemCertStore); val = config.Get(FlightSqlConnection::DISABLE_CERTIFICATE_VERIFICATION).c_str(); @@ -273,17 +284,17 @@ int DsnConfigurationWindow::CreateEncryptionSettingsGroup(int posX, int posY, in const int rightCheckPosX = rightPosX + (editPosX - labelPosX); const bool disableCertVerification = driver::odbcabstraction::AsBool(val).value_or(false); - labels.push_back(CreateLabel( - rightPosX, rowPos, LABEL_WIDTH, 2 * ROW_HEIGHT, - "Disable Certificate Verification:", ChildId::DISABLE_CERT_VERIFICATION_LABEL)); + labels.push_back(CreateLabel(rightPosX, rowPos, LABEL_WIDTH, 2 * ROW_HEIGHT, + L"Disable Certificate Verification:", + ChildId::DISABLE_CERT_VERIFICATION_LABEL)); disableCertVerificationCheckBox = CreateCheckBox( - rightCheckPosX, rowPos - 2, 20, 2 * ROW_HEIGHT, "", + rightCheckPosX, rowPos - 2, 20, 2 * ROW_HEIGHT, L"", ChildId::DISABLE_CERT_VERIFICATION_CHECKBOX, disableCertVerification); rowPos += INTERVAL + static_cast<int>(1.5 * ROW_HEIGHT); encryptionSettingsGroupBox = - CreateGroupBox(posX, posY, sizeX, rowPos - posY, "Encryption settings", + CreateGroupBox(posX, posY, sizeX, rowPos - posY, L"Encryption settings", ChildId::AUTH_SETTINGS_GROUP_BOX); return rowPos - posY; @@ -301,12 +312,15 @@ int DsnConfigurationWindow::CreatePropertiesGroup(int posX, int posY, int sizeX) propertyList = CreateList(labelPosX, rowPos, listSize, listHeight, ChildId::PROPERTY_LIST); - propertyList->ListAddColumn("Key", 0, columnSize); - propertyList->ListAddColumn("Value", 1, columnSize); + propertyList->ListAddColumn(L"Key", 0, columnSize); + propertyList->ListAddColumn(L"Value", 1, columnSize); const auto keys = config.GetCustomKeys(); for (const auto& key : keys) { - propertyList->ListAddItem({key, config.Get(key)}); + std::wstring wKey = arrow::util::UTF8ToWideString(key).ValueOr(L""); + std::wstring wVal = arrow::util::UTF8ToWideString(config.Get(key)).ValueOr(L""); + + propertyList->ListAddItem({wKey, wVal}); } SendMessage(propertyList->GetHandle(), LVM_SETEXTENDEDLISTVIEWSTYLE, @@ -316,15 +330,15 @@ int DsnConfigurationWindow::CreatePropertiesGroup(int posX, int posY, int sizeX) int deletePosX = width - INTERVAL - MARGIN - BUTTON_WIDTH; int addPosX = deletePosX - INTERVAL - BUTTON_WIDTH; - addButton = CreateButton(addPosX, rowPos, BUTTON_WIDTH, BUTTON_HEIGHT, "Add", + addButton = CreateButton(addPosX, rowPos, BUTTON_WIDTH, BUTTON_HEIGHT, L"Add", ChildId::ADD_BUTTON); - deleteButton = CreateButton(deletePosX, rowPos, BUTTON_WIDTH, BUTTON_HEIGHT, "Delete", + deleteButton = CreateButton(deletePosX, rowPos, BUTTON_WIDTH, BUTTON_HEIGHT, L"Delete", ChildId::DELETE_BUTTON); rowPos += INTERVAL + BUTTON_HEIGHT; propertyGroupBox = CreateGroupBox(posX, posY, sizeX, rowPos - posY, - "Advanced properties", ChildId::PROPERTY_GROUP_BOX); + L"Advanced properties", ChildId::PROPERTY_GROUP_BOX); return rowPos - posY; } @@ -384,7 +398,7 @@ void DsnConfigurationWindow::CheckEnableOk() { void DsnConfigurationWindow::SaveParameters(Configuration& targetConfig) { targetConfig.Clear(); - std::string text; + std::wstring text; nameEdit->GetText(text);
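// The conversion convention used throughout this window code, sketched for
// clarity (assumes arrow/util/utf8.h; illustrative only): configuration
// values stay UTF-8 internally and are widened only at the Win32 boundary,
// with ValueOr() substituting an empty string when a value is not valid UTF-8.
//
//   #include <string>
//   #include "arrow/util/utf8.h"
//
//   std::wstring ToWin32(const std::string& utf8_value) {
//     return arrow::util::UTF8ToWideString(utf8_value).ValueOr(L"");
//   }
//
//   std::string FromWin32(const std::wstring& wide_value) {
//     return arrow::util::WideStringToUTF8(wide_value).ValueOr("");
//   }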
targetConfig.Set(FlightSqlConnection::DSN, text); serverEdit->GetText(text); @@ -427,7 +441,9 @@ void DsnConfigurationWindow::SaveParameters(Configuration& targetConfig) { // Get all the list properties. const auto properties = propertyList->ListGetAll(); for (const auto& property : properties) { - targetConfig.Set(property[0], property[1]); + std::string propertyKey = arrow::util::WideStringToUTF8(property[0]).ValueOr(""); + std::string propertyValue = arrow::util::WideStringToUTF8(property[1]).ValueOr(""); + targetConfig.Set(propertyKey, propertyValue); } } @@ -463,10 +479,13 @@ bool DsnConfigurationWindow::OnMessage(UINT msg, WPARAM wParam, LPARAM lParam) { SaveParameters(testConfig); std::string testMessage = TestConnection(testConfig); - MessageBox(NULL, testMessage.c_str(), "Test Connection Success", MB_OK); + std::wstring wTestMessage = + arrow::util::UTF8ToWideString(testMessage).ValueOr(L""); + MessageBox(NULL, wTestMessage.c_str(), L"Test Connection Success", MB_OK); } catch (odbcabstraction::DriverException& err) { - MessageBox(NULL, err.GetMessageText().c_str(), "Error!", - MB_ICONEXCLAMATION | MB_OK); + std::wstring wMessageText = + arrow::util::UTF8ToWideString(err.GetMessageText()).ValueOr(L""); + MessageBox(NULL, wMessageText.c_str(), L"Error!", MB_ICONEXCLAMATION | MB_OK); } break; @@ -477,8 +496,9 @@ bool DsnConfigurationWindow::OnMessage(UINT msg, WPARAM wParam, LPARAM lParam) { accepted = true; PostMessage(GetHandle(), WM_CLOSE, 0, 0); } catch (odbcabstraction::DriverException& err) { - MessageBox(NULL, err.GetMessageText().c_str(), "Error!", - MB_ICONEXCLAMATION | MB_OK); + std::wstring wMessageText = + arrow::util::UTF8ToWideString(err.GetMessageText()).ValueOr(L""); + MessageBox(NULL, wMessageText.c_str(), L"Error!", MB_ICONEXCLAMATION | MB_OK); } break; @@ -520,7 +540,7 @@ bool DsnConfigurationWindow::OnMessage(UINT msg, WPARAM wParam, LPARAM lParam) { case ChildId::CERTIFICATE_BROWSE_BUTTON: { OPENFILENAME openFileName; - char fileName[FILENAME_MAX]; + wchar_t fileName[FILENAME_MAX]; ZeroMemory(&openFileName, sizeof(openFileName)); openFileName.lStructSize = sizeof(openFileName); @@ -529,7 +549,7 @@ bool DsnConfigurationWindow::OnMessage(UINT msg, WPARAM wParam, LPARAM lParam) { openFileName.lpstrFile[0] = '\0'; openFileName.nMaxFile = FILENAME_MAX; // TODO: What type should this be? - openFileName.lpstrFilter = "All\0*.*"; + openFileName.lpstrFilter = L"All\0*.*"; openFileName.nFilterIndex = 1; openFileName.lpstrFileTitle = NULL; openFileName.nMaxFileTitle = 0; @@ -566,8 +586,8 @@ bool DsnConfigurationWindow::OnMessage(UINT msg, WPARAM wParam, LPARAM lParam) { addWindow.Update(); if (ProcessMessages(addWindow) == Result::OK) { - std::string key; - std::string value; + std::wstring key; + std::wstring value; addWindow.GetProperty(key, value); propertyList->ListAddItem({key, value}); } diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/window.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/window.cc index f88cd8a3f88..2940c95578a 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/window.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/ui/window.cc @@ -49,7 +49,7 @@ HINSTANCE GetHInstance() { return hInstance; } -Window::Window(Window* parent, const char* className, const char* title) +Window::Window(Window* parent, const wchar_t* className, const wchar_t* title) : className(className), title(title), handle(NULL), parent(parent), created(false) { // No-op. 
} @@ -88,7 +88,7 @@ void Window::Create(DWORD style, int posX, int posY, int width, int height, int } std::unique_ptr<Window> Window::CreateTabControl(int id) { - std::unique_ptr<Window> child(new Window(this, WC_TABCONTROL, "")); + std::unique_ptr<Window> child(new Window(this, WC_TABCONTROL, L"")); // Get the dimensions of the parent window's client area, and // create a tab control child window of that size. @@ -103,7 +103,7 @@ std::unique_ptr<Window> Window::CreateTabControl(int id) { std::unique_ptr<Window> Window::CreateList(int posX, int posY, int sizeX, int sizeY, int id) { - std::unique_ptr<Window> child(new Window(this, WC_LISTVIEW, "")); + std::unique_ptr<Window> child(new Window(this, WC_LISTVIEW, L"")); child->Create( WS_CHILD | WS_VISIBLE | WS_BORDER | LVS_REPORT | LVS_EDITLABELS | WS_TABSTOP, posX, @@ -113,8 +113,8 @@ std::unique_ptr<Window> Window::CreateList(int posX, int posY, int sizeX, int si } std::unique_ptr<Window> Window::CreateGroupBox(int posX, int posY, int sizeX, int sizeY, - const char* title, int id) { - std::unique_ptr<Window> child(new Window(this, "Button", title)); + const wchar_t* title, int id) { + std::unique_ptr<Window> child(new Window(this, L"Button", title)); child->Create(WS_CHILD | WS_VISIBLE | BS_GROUPBOX, posX, posY, sizeX, sizeY, id); @@ -122,8 +122,8 @@ std::unique_ptr<Window> Window::CreateGroupBox(int posX, int posY, int sizeX, in } std::unique_ptr<Window> Window::CreateLabel(int posX, int posY, int sizeX, int sizeY, - const char* title, int id) { - std::unique_ptr<Window> child(new Window(this, "Static", title)); + const wchar_t* title, int id) { + std::unique_ptr<Window> child(new Window(this, L"Static", title)); child->Create(WS_CHILD | WS_VISIBLE, posX, posY, sizeX, sizeY, id); @@ -131,8 +131,8 @@ std::unique_ptr<Window> Window::CreateLabel(int posX, int posY, int sizeX, int s } std::unique_ptr<Window> Window::CreateEdit(int posX, int posY, int sizeX, int sizeY, - const char* title, int id, int style) { - std::unique_ptr<Window> child(new Window(this, "Edit", title)); + const wchar_t* title, int id, int style) { + std::unique_ptr<Window> child(new Window(this, L"Edit", title)); child->Create(WS_CHILD | WS_VISIBLE | WS_BORDER | ES_AUTOHSCROLL | WS_TABSTOP | style, posX, posY, sizeX, sizeY, id); @@ -141,8 +141,8 @@ std::unique_ptr<Window> Window::CreateEdit(int posX, int posY, int sizeX, int si } std::unique_ptr<Window> Window::CreateButton(int posX, int posY, int sizeX, int sizeY, - const char* title, int id, int style) { - std::unique_ptr<Window> child(new Window(this, "Button", title)); + const wchar_t* title, int id, int style) { + std::unique_ptr<Window> child(new Window(this, L"Button", title)); child->Create(WS_CHILD | WS_VISIBLE | WS_TABSTOP | style, posX, posY, sizeX, sizeY, id); @@ -150,8 +150,8 @@ std::unique_ptr<Window> Window::CreateButton(int posX, int posY, int sizeX, int } std::unique_ptr<Window> Window::CreateCheckBox(int posX, int posY, int sizeX, int sizeY, - const char* title, int id, bool state) { - std::unique_ptr<Window> child(new Window(this, "Button", title)); + const wchar_t* title, int id, bool state) { + std::unique_ptr<Window> child(new Window(this, L"Button", title)); child->Create(WS_CHILD | WS_VISIBLE | BS_CHECKBOX | WS_TABSTOP, posX, posY, sizeX, sizeY, id); @@ -162,8 +162,8 @@ std::unique_ptr<Window> Window::CreateCheckBox(int posX, int posY, int sizeX, in } std::unique_ptr<Window> Window::CreateComboBox(int posX, int posY, int sizeX, int sizeY, - const char* title, int id) { - std::unique_ptr<Window> child(new Window(this, "Combobox", title)); + const wchar_t* title, int id) { + std::unique_ptr<Window> child(new Window(this, L"Combobox", title)); child->Create(WS_CHILD | WS_VISIBLE | CBS_DROPDOWNLIST | WS_TABSTOP, posX, posY, sizeX, sizeY, id); @@
-194,12 +194,12 @@ bool Window::IsTextEmpty() const { return (len <= 0); } -void Window::ListAddColumn(const std::string& name, int index, int width) { +void Window::ListAddColumn(const std::wstring& name, int index, int width) { LVCOLUMN lvc; lvc.mask = LVCF_FMT | LVCF_WIDTH | LVCF_TEXT | LVCF_SUBITEM; lvc.fmt = LVCFMT_LEFT; lvc.cx = width; - lvc.pszText = const_cast<char*>(name.c_str()); + lvc.pszText = const_cast<wchar_t*>(name.c_str()); lvc.iSubItem = index; if (ListView_InsertColumn(handle, index, &lvc) == -1) { @@ -209,10 +209,10 @@ void Window::ListAddColumn(const std::string& name, int index, int width) { } } -void Window::ListAddItem(const std::vector<std::string>& items) { +void Window::ListAddItem(const std::vector<std::wstring>& items) { LVITEM lvi = {0}; lvi.mask = LVIF_TEXT; - lvi.pszText = const_cast<char*>(items[0].c_str()); + lvi.pszText = const_cast<wchar_t*>(items[0].c_str()); int ret = ListView_InsertItem(handle, &lvi); if (ret < 0) { @@ -223,7 +223,7 @@ void Window::ListAddItem(const std::vector<std::string>& items) { for (size_t i = 1; i < items.size(); ++i) { ListView_SetItemText(handle, ret, static_cast<int>(i), - const_cast<char*>(items[i].c_str())); + const_cast<wchar_t*>(items[i].c_str())); } } @@ -238,15 +238,15 @@ void Window::ListDeleteSelectedItem() { } } -std::vector<std::vector<std::string> > Window::ListGetAll() { +std::vector<std::vector<std::wstring> > Window::ListGetAll() { #define BUF_LEN 1024 - char buf[BUF_LEN]; + wchar_t buf[BUF_LEN]; - std::vector<std::vector<std::string> > values; + std::vector<std::vector<std::wstring> > values; const int numColumns = Header_GetItemCount(ListView_GetHeader(handle)); const int numItems = ListView_GetItemCount(handle); for (int i = 0; i < numItems; ++i) { - std::vector<std::string> row; + std::vector<std::wstring> row; for (int j = 0; j < numColumns; ++j) { ListView_GetItemText(handle, i, j, buf, BUF_LEN); row.emplace_back(buf); @@ -257,11 +257,11 @@ std::vector<std::vector<std::string> > Window::ListGetAll() { return values; } -void Window::AddTab(const std::string& name, int index) { +void Window::AddTab(const std::wstring& name, int index) { TCITEM tabControlItem; tabControlItem.mask = TCIF_TEXT | TCIF_IMAGE; tabControlItem.iImage = -1; - tabControlItem.pszText = const_cast<char*>(name.c_str()); + tabControlItem.pszText = const_cast<wchar_t*>(name.c_str()); if (TabCtrl_InsertItem(handle, index, &tabControlItem) == -1) { std::stringstream buf; buf << "Can not add tab, error code: " << GetLastError(); @@ -269,7 +269,7 @@ void Window::AddTab(const std::string& name, int index) { } } -void Window::GetText(std::string& text) const { +void Window::GetText(std::wstring& text) const { if (!IsEnabled()) { text.clear(); @@ -292,7 +292,7 @@ void Window::GetText(std::string& text) const { boost::algorithm::trim(text); } -void Window::SetText(const std::string& text) const { +void Window::SetText(const std::wstring& text) const { SNDMSG(handle, WM_SETTEXT, 0, reinterpret_cast<LPARAM>(text.c_str())); } @@ -304,7 +304,7 @@ void Window::SetChecked(bool state) { Button_SetCheck(handle, state ?
BST_CHECKED : BST_UNCHECKED); } -void Window::AddString(const std::string& str) { +void Window::AddString(const std::wstring& str) { SNDMSG(handle, CB_ADDSTRING, 0, reinterpret_cast<LPARAM>(str.c_str())); }
diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc index 18b5c399c2c..2017936dd90 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/win_system_dsn.cc @@ -22,6 +22,9 @@ #include #include +#include "arrow/result.h" +#include "arrow/util/utf8.h" + #include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/configuration.h" #include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/config/connection_string_parser.h" @@ -44,12 +47,6 @@ using driver::flight_sql::config::Result; using driver::flight_sql::config::Window; using driver::odbcabstraction::DriverException; -BOOL CALLBACK ConfigDriver(HWND hwndParent, WORD fRequest, LPCSTR lpszDriver, - LPCSTR lpszArgs, LPSTR lpszMsg, WORD cbMsgMax, - WORD* pcbMsgOut) { - return false; -} - bool DisplayConnectionWindow(void* windowParent, Configuration& config) { HWND hwndParent = (HWND)windowParent; @@ -69,10 +66,12 @@ bool DisplayConnectionWindow(void* windowParent, Configuration& config) { std::stringstream buf; buf << "SQL State: " << err.GetSqlState() << ", Message: " << err.GetMessageText() << ", Code: " << err.GetNativeError(); - std::string message = buf.str(); - MessageBox(NULL, message.c_str(), "Error!", MB_ICONEXCLAMATION | MB_OK); + std::wstring wMessage = arrow::util::UTF8ToWideString(buf.str()).ValueOr(L""); + MessageBox(NULL, wMessage.c_str(), L"Error!", MB_ICONEXCLAMATION | MB_OK); - SQLPostInstallerError(err.GetNativeError(), err.GetMessageText().c_str()); + std::wstring wMessageText = + arrow::util::UTF8ToWideString(err.GetMessageText()).ValueOr(L""); + SQLPostInstallerError(err.GetNativeError(), wMessageText.c_str()); } return false; @@ -93,15 +92,17 @@ bool DisplayConnectionWindow(void* windowParent, Configuration& config, } } -BOOL INSTAPI ConfigDSN(HWND hwndParent, WORD req, LPCSTR driver, LPCSTR attributes) { +BOOL INSTAPI ConfigDSNW(HWND hwndParent, WORD req, LPCWSTR wDriver, LPCWSTR wAttributes) { Configuration config; ConnectionStringParser parser(config); - parser.ParseConfigAttributes(attributes); + std::string attributes = + arrow::util::WideStringToUTF8(std::wstring(wAttributes)).ValueOr(""); + parser.ParseConfigAttributes(attributes.c_str()); switch (req) { case ODBC_ADD_DSN: { config.LoadDefaults(); - if (!DisplayConnectionWindow(hwndParent, config) || !RegisterDsn(config, driver)) + if (!DisplayConnectionWindow(hwndParent, config) || !RegisterDsn(config, wDriver)) return FALSE; break; @@ -109,13 +110,14 @@ case ODBC_CONFIG_DSN: { const std::string& dsn = config.Get(FlightSqlConnection::DSN); - if (!SQLValidDSN(dsn.c_str())) return FALSE; + std::wstring wDsn = arrow::util::UTF8ToWideString(dsn).ValueOr(L""); + if (!SQLValidDSN(wDsn.c_str())) return FALSE; Configuration loaded(config); loaded.LoadDsn(dsn); - if (!DisplayConnectionWindow(hwndParent, loaded) || !UnregisterDsn(dsn.c_str()) || - !RegisterDsn(loaded, driver) + if (!DisplayConnectionWindow(hwndParent, loaded) || !UnregisterDsn(wDsn.c_str()) || + !RegisterDsn(loaded, wDriver)) return FALSE; break; @@ -123,7 +125,8 @@ BOOL INSTAPI ConfigDSN(HWND hwndParent, WORD
req, LPCSTR driver, LPCSTR attribut case ODBC_REMOVE_DSN: { const std::string& dsn = config.Get(FlightSqlConnection::DSN); - if (!SQLValidDSN(dsn.c_str()) || !UnregisterDsn(dsn)) return FALSE; + std::wstring wDsn = arrow::util::UTF8ToWideString(dsn).ValueOr(L""); + if (!SQLValidDSN(wDsn.c_str()) || !UnregisterDsn(wDsn)) return FALSE; break; } diff --git a/cpp/src/arrow/flight/sql/odbc/odbc.def b/cpp/src/arrow/flight/sql/odbc/odbc.def index c90c181d7c1..ed3203afbb4 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc.def +++ b/cpp/src/arrow/flight/sql/odbc/odbc.def @@ -17,7 +17,7 @@ LIBRARY arrow_flight_sql_odbc EXPORTS - ConfigDSN + ConfigDSNW SQLAllocConnect SQLAllocEnv SQLAllocHandle diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc index 21d060ad70f..4e732f16f56 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.cc @@ -224,10 +224,9 @@ inline bool IsValidStringFieldArgs(SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLen return hasValidBuffer || stringLengthPtr; } -SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, - SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, - SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, - SQLSMALLINT* stringLengthPtr) { +SQLRETURN SQLGetDiagField(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, + SQLSMALLINT diagIdentifier, SQLPOINTER diagInfoPtr, + SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr) { // TODO: Implement additional fields types // https://github.com/apache/arrow/issues/46573 using driver::odbcabstraction::Diagnostics; @@ -478,10 +477,10 @@ SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, return SQL_ERROR; } -SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, - SQLWCHAR* sqlState, SQLINTEGER* nativeErrorPtr, - SQLWCHAR* messageText, SQLSMALLINT bufferLength, - SQLSMALLINT* textLengthPtr) { +SQLRETURN SQLGetDiagRec(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, + SQLWCHAR* sqlState, SQLINTEGER* nativeErrorPtr, + SQLWCHAR* messageText, SQLSMALLINT bufferLength, + SQLSMALLINT* textLengthPtr) { using driver::odbcabstraction::Diagnostics; using ODBC::GetStringAttribute; using ODBC::ODBCConnection; @@ -677,17 +676,17 @@ SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, }); } -SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, - SQLWCHAR* inConnectionString, - SQLSMALLINT inConnectionStringLen, - SQLWCHAR* outConnectionString, - SQLSMALLINT outConnectionStringBufferLen, - SQLSMALLINT* outConnectionStringLen, - SQLUSMALLINT driverCompletion) { +SQLRETURN SQLDriverConnect(SQLHDBC conn, SQLHWND windowHandle, + SQLWCHAR* inConnectionString, + SQLSMALLINT inConnectionStringLen, + SQLWCHAR* outConnectionString, + SQLSMALLINT outConnectionStringBufferLen, + SQLSMALLINT* outConnectionStringLen, + SQLUSMALLINT driverCompletion) { // TODO: Implement FILEDSN and SAVEFILE keywords according to the spec // https://github.com/apache/arrow/issues/46449 - // TODO: Copy connection string properly in SQLDriverConnectW according to the + // TODO: Copy connection string properly in SQLDriverConnect according to the // spec https://github.com/apache/arrow/issues/46560 using driver::odbcabstraction::Connection; @@ -712,7 +711,7 @@ SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, std::vector missing_properties; - // TODO: Implement SQL_DRIVER_COMPLETE_REQUIRED in SQLDriverConnectW according to the + // TODO: Implement 
SQL_DRIVER_COMPLETE_REQUIRED in SQLDriverConnect according to the // spec https://github.com/apache/arrow/issues/46448 #if defined _WIN32 || defined _WIN64 // Load the DSN window according to driverCompletion @@ -757,9 +756,9 @@ SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, }); } -SQLRETURN SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, - SQLWCHAR* userName, SQLSMALLINT userNameLen, SQLWCHAR* password, - SQLSMALLINT passwordLen) { +SQLRETURN SQLConnect(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, + SQLWCHAR* userName, SQLSMALLINT userNameLen, SQLWCHAR* password, + SQLSMALLINT passwordLen) { using driver::flight_sql::FlightSqlConnection; using driver::flight_sql::config::Configuration; using ODBC::ODBCConnection; @@ -811,8 +810,8 @@ SQLRETURN SQLDisconnect(SQLHDBC conn) { }); } -SQLRETURN SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValuePtr, - SQLSMALLINT bufLen, SQLSMALLINT* length) { +SQLRETURN SQLGetInfo(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValuePtr, + SQLSMALLINT bufLen, SQLSMALLINT* length) { // TODO: complete implementation of SQLGetInfoW and write tests using ODBC::ODBCConnection; diff --git a/cpp/src/arrow/flight/sql/odbc/odbc_api.h b/cpp/src/arrow/flight/sql/odbc/odbc_api.h index 9350cead384..eb2f677a385 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbc_api.h +++ b/cpp/src/arrow/flight/sql/odbc/odbc_api.h @@ -32,29 +32,28 @@ namespace arrow { SQLRETURN SQLAllocHandle(SQLSMALLINT type, SQLHANDLE parent, SQLHANDLE* result); SQLRETURN SQLFreeHandle(SQLSMALLINT type, SQLHANDLE handle); SQLRETURN SQLFreeStmt(SQLHSTMT stmt, SQLUSMALLINT option); -SQLRETURN SQLGetDiagFieldW(SQLSMALLINT handleType, SQLHANDLE handle, - SQLSMALLINT recNumber, SQLSMALLINT diagIdentifier, - SQLPOINTER diagInfoPtr, SQLSMALLINT bufferLength, - SQLSMALLINT* stringLengthPtr); -SQLRETURN SQLGetDiagRecW(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, - SQLWCHAR* sqlState, SQLINTEGER* nativeErrorPtr, - SQLWCHAR* messageText, SQLSMALLINT bufferLength, - SQLSMALLINT* textLengthPtr); +SQLRETURN SQLGetDiagField(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, + SQLSMALLINT diagIdentifier, SQLPOINTER diagInfoPtr, + SQLSMALLINT bufferLength, SQLSMALLINT* stringLengthPtr); +SQLRETURN SQLGetDiagRec(SQLSMALLINT handleType, SQLHANDLE handle, SQLSMALLINT recNumber, + SQLWCHAR* sqlState, SQLINTEGER* nativeErrorPtr, + SQLWCHAR* messageText, SQLSMALLINT bufferLength, + SQLSMALLINT* textLengthPtr); SQLRETURN SQLGetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, SQLINTEGER bufferLen, SQLINTEGER* strLenPtr); SQLRETURN SQLSetEnvAttr(SQLHENV env, SQLINTEGER attr, SQLPOINTER valuePtr, SQLINTEGER strLen); -SQLRETURN SQLDriverConnectW(SQLHDBC conn, SQLHWND windowHandle, - SQLWCHAR* inConnectionString, - SQLSMALLINT inConnectionStringLen, - SQLWCHAR* outConnectionString, - SQLSMALLINT outConnectionStringBufferLen, - SQLSMALLINT* outConnectionStringLen, - SQLUSMALLINT driverCompletion); -SQLRETURN SQLConnectW(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, - SQLWCHAR* userName, SQLSMALLINT userNameLen, SQLWCHAR* password, - SQLSMALLINT passwordLen); +SQLRETURN SQLDriverConnect(SQLHDBC conn, SQLHWND windowHandle, + SQLWCHAR* inConnectionString, + SQLSMALLINT inConnectionStringLen, + SQLWCHAR* outConnectionString, + SQLSMALLINT outConnectionStringBufferLen, + SQLSMALLINT* outConnectionStringLen, + SQLUSMALLINT driverCompletion); +SQLRETURN SQLConnect(SQLHDBC conn, SQLWCHAR* dsnName, SQLSMALLINT dsnNameLen, 
+ SQLWCHAR* userName, SQLSMALLINT userNameLen, SQLWCHAR* password, + SQLSMALLINT passwordLen); SQLRETURN SQLDisconnect(SQLHDBC conn); -SQLRETURN SQLGetInfoW(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValuePtr, - SQLSMALLINT bufLen, SQLSMALLINT* length); +SQLRETURN SQLGetInfo(SQLHDBC conn, SQLUSMALLINT infoType, SQLPOINTER infoValuePtr, + SQLSMALLINT bufLen, SQLSMALLINT* length); } // namespace arrow
diff --git a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc index f28ee1789b0..3da4a63c5f5 100644 --- a/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc +++ b/cpp/src/arrow/flight/sql/odbc/odbcabstraction/odbc_impl/odbc_connection.cc @@ -17,6 +17,9 @@ #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_connection.h" +#include "arrow/result.h" +#include "arrow/util/utf8.h" + #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/exceptions.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/attribute_utils.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_descriptor.h" @@ -56,40 +59,44 @@ const boost::xpressive::sregex CONNECTION_STR_REGEX( void loadPropertiesFromDSN(const std::string& dsn, Connection::ConnPropertyMap& properties) { const size_t BUFFER_SIZE = 1024 * 10; - std::vector<char> outputBuffer; + std::vector<wchar_t> outputBuffer; outputBuffer.resize(BUFFER_SIZE, '\0'); SQLSetConfigMode(ODBC_BOTH_DSN); - SQLGetPrivateProfileString(dsn.c_str(), NULL, "", &outputBuffer[0], BUFFER_SIZE, - "odbc.ini"); + std::wstring wDsn = arrow::util::UTF8ToWideString(dsn).ValueOr(L""); + + SQLGetPrivateProfileString(wDsn.c_str(), NULL, L"", &outputBuffer[0], BUFFER_SIZE, + L"odbc.ini"); // The output buffer holds the list of keys in a series of NUL-terminated strings. // The series is terminated with an empty string (eg a NUL-terminator terminating the // last key followed by a NUL terminator after). - std::vector<std::string> keys; + std::vector<std::wstring> keys; size_t pos = 0; while (pos < BUFFER_SIZE) { - std::string key(&outputBuffer[pos]); - if (key.empty()) { + std::wstring wKey(&outputBuffer[pos]); + if (wKey.empty()) { break; } - size_t len = key.size(); + size_t len = wKey.size(); // Skip over Driver or DSN keys.
- if (!boost::iequals(key, "DSN") && !boost::iequals(key, "Driver")) { - keys.emplace_back(std::move(key)); + if (!boost::iequals(wKey, L"DSN") && !boost::iequals(wKey, L"Driver")) { + keys.emplace_back(std::move(wKey)); } pos += len + 1; } - for (auto& key : keys) { + for (auto& wKey : keys) { outputBuffer.clear(); outputBuffer.resize(BUFFER_SIZE, '\0'); - SQLGetPrivateProfileString(dsn.c_str(), key.data(), "", &outputBuffer[0], BUFFER_SIZE, - "odbc.ini"); + SQLGetPrivateProfileString(wDsn.c_str(), wKey.data(), L"", &outputBuffer[0], + BUFFER_SIZE, L"odbc.ini"); - std::string value = std::string(&outputBuffer[0]); - auto propIter = properties.find(std::string(key)); + std::wstring wValue = std::wstring(&outputBuffer[0]); + std::string value = arrow::util::WideStringToUTF8(wValue).ValueOr(""); + std::string key = arrow::util::WideStringToUTF8(std::wstring(wKey)).ValueOr(""); + auto propIter = properties.find(key); if (propIter == properties.end()) { properties.emplace(std::make_pair(std::move(key), std::move(value))); } diff --git a/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt index 41e51182275..5f055682dfb 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/odbc/tests/CMakeLists.txt @@ -19,8 +19,6 @@ add_custom_target(tests) include_directories(${ODBC_INCLUDE_DIRS}) -add_definitions(-DUNICODE=1) - find_package(SQLite3Alt REQUIRED) set(ARROW_FLIGHT_SQL_MOCK_SERVER_SRCS diff --git a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc index 176129ba627..99fce0684c7 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/connection_test.cc @@ -400,10 +400,8 @@ TEST_F(FlightSQLODBCRemoteTestBase, TestSQLDriverConnectInvalidUid) { VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); - // TODO: Check that outstr remains empty after SqlWcharToString - // is fixed to handle empty `outstr` - // std::string out_connection_string = ODBC::SqlWcharToString(outstr, outstrlen); - // EXPECT_TRUE(out_connection_string.empty()); + std::string out_connection_string = ODBC::SqlWcharToString(outstr, outstrlen); + EXPECT_TRUE(out_connection_string.empty()); // Free connection handle ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); @@ -463,7 +461,7 @@ TYPED_TEST(FlightSQLODBCTestBase, TestSQLConnect) { EXPECT_TRUE(ret == SQL_SUCCESS); // Remove DSN - EXPECT_TRUE(UnregisterDsn(dsn)); + EXPECT_TRUE(UnregisterDsn(wdsn)); // Disconnect from ODBC ret = SQLDisconnect(conn); @@ -541,7 +539,7 @@ TEST_F(FlightSQLODBCRemoteTestBase, TestSQLConnectInputUidPwd) { EXPECT_TRUE(ret == SQL_SUCCESS); // Remove DSN - EXPECT_TRUE(UnregisterDsn(dsn)); + EXPECT_TRUE(UnregisterDsn(wdsn)); // Disconnect from ODBC ret = SQLDisconnect(conn); @@ -618,7 +616,7 @@ TEST_F(FlightSQLODBCRemoteTestBase, TestSQLConnectInvalidUid) { VerifyOdbcErrorState(SQL_HANDLE_DBC, conn, std::string("28000")); // Remove DSN - EXPECT_TRUE(UnregisterDsn(dsn)); + EXPECT_TRUE(UnregisterDsn(wdsn)); // Free connection handle ret = SQLFreeHandle(SQL_HANDLE_DBC, conn); @@ -681,7 +679,7 @@ TEST_F(FlightSQLODBCRemoteTestBase, TestSQLConnectDSNPrecedence) { EXPECT_TRUE(ret == SQL_SUCCESS); // Remove DSN - EXPECT_TRUE(UnregisterDsn(dsn)); + EXPECT_TRUE(UnregisterDsn(wdsn)); // Disconnect from ODBC ret = SQLDisconnect(conn); diff --git a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc 
index c079c3c175c..2d2a8f5d305 100644 --- a/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc +++ b/cpp/src/arrow/flight/sql/odbc/tests/odbc_test_suite.cc @@ -262,8 +262,8 @@ bool writeDSN(Connection::ConnPropertyMap properties) { } std::string driver = config.Get(FlightSqlConnection::DRIVER); - - return RegisterDsn(config, driver.c_str()); + std::wstring wDriver = arrow::util::UTF8ToWideString(driver).ValueOr(L""); + return RegisterDsn(config, wDriver.c_str()); } } // namespace integration_tests } // namespace odbc From 809bfaf698a34c02bb0a64c72df65e8192101bbe Mon Sep 17 00:00:00 2001 From: Bryce Mecum Date: Wed, 4 Jun 2025 17:18:26 -0700 Subject: [PATCH 19/63] GH-46439: [C++] Use result pattern for all FromJSONString Helpers (#46696) ### Rationale for this change https://github.com/apache/arrow/issues/45908 brought these helpers into the public API but didn't consider changes to their API. This PR makes all the helpers use the standard Result-pattern to make them more ergonomic. We can do this now without a breaking change because this and https://github.com/apache/arrow/issues/45908 will be part of Arrow 21. ### What changes are included in this PR? - Refactored all FromJSONString helpers to use the Result pattern (instead of using outparams) ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46439 Lead-authored-by: Bryce Mecum Co-authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- .../arrow/from_json_string_example.cc | 14 ++-- cpp/src/arrow/json/from_string.cc | 39 +++++------ cpp/src/arrow/json/from_string.h | 53 +++++++-------- cpp/src/arrow/json/from_string_test.cc | 66 +++++++++---------- cpp/src/arrow/testing/gtest_util.cc | 14 ++-- python/pyarrow/src/arrow/python/gdb.cc | 16 ++--- 6 files changed, 87 insertions(+), 115 deletions(-) diff --git a/cpp/examples/arrow/from_json_string_example.cc b/cpp/examples/arrow/from_json_string_example.cc index da13d913489..eb919303fee 100644 --- a/cpp/examples/arrow/from_json_string_example.cc +++ b/cpp/examples/arrow/from_json_string_example.cc @@ -68,15 +68,15 @@ arrow::Status RunExample() { "[[11, 22], null, [null, 33]]")); // ChunkedArrayFromJSONString - std::shared_ptr chunked_array; - ARROW_RETURN_NOT_OK(ChunkedArrayFromJSONString( - arrow::int32(), {"[5, 10]", "[null]", "[16]"}, &chunked_array)); + ARROW_ASSIGN_OR_RAISE( + auto chunked_array, + ChunkedArrayFromJSONString(arrow::int32(), {"[5, 10]", "[null]", "[16]"})); // DictArrayFromJSONString - std::shared_ptr dict_array; - ARROW_RETURN_NOT_OK(DictArrayFromJSONString( - dictionary(arrow::int32(), arrow::utf8()), "[0, 1, 0, 2, 0, 3]", - R"(["k1", "k2", "k3", "k4"])", &dict_array)); + ARROW_ASSIGN_OR_RAISE( + auto dict_array, + DictArrayFromJSONString(dictionary(arrow::int32(), arrow::utf8()), + "[0, 1, 0, 2, 0, 3]", R"(["k1", "k2", "k3", "k4"])")); return arrow::Status::OK(); } diff --git a/cpp/src/arrow/json/from_string.cc b/cpp/src/arrow/json/from_string.cc index b2972f7150e..e35a362f5a2 100644 --- a/cpp/src/arrow/json/from_string.cc +++ b/cpp/src/arrow/json/from_string.cc @@ -1004,23 +1004,20 @@ Result> ArrayFromJSONString(const std::shared_ptr& type, - const std::vector& json_strings, - std::shared_ptr* out) { +Result> ChunkedArrayFromJSONString( + const std::shared_ptr& type, const std::vector& json_strings) { ArrayVector out_chunks; out_chunks.reserve(json_strings.size()); for (const std::string& chunk_json : json_strings) { out_chunks.emplace_back(); ARROW_ASSIGN_OR_RAISE(out_chunks.back(), ArrayFromJSONString(type, 
chunk_json)); } - *out = std::make_shared<ChunkedArray>(std::move(out_chunks), type); - return Status::OK(); + return std::make_shared<ChunkedArray>(std::move(out_chunks), type); } -Status DictArrayFromJSONString(const std::shared_ptr<DataType>& type, - std::string_view indices_json, - std::string_view dictionary_json, - std::shared_ptr<Array>* out) { +Result<std::shared_ptr<Array>> DictArrayFromJSONString( + const std::shared_ptr<DataType>& type, std::string_view indices_json, + std::string_view dictionary_json) { if (type->id() != Type::DICTIONARY) { return Status::TypeError("DictArrayFromJSON requires dictionary type, got ", *type); } @@ -1031,13 +1028,11 @@ Status DictArrayFromJSONString(const std::shared_ptr<DataType>& type, ArrayFromJSONString(dictionary_type.index_type(), indices_json)); ARROW_ASSIGN_OR_RAISE(auto dictionary, ArrayFromJSONString(dictionary_type.value_type(), dictionary_json)); - - return DictionaryArray::FromArrays(type, std::move(indices), std::move(dictionary)) - .Value(out); + return DictionaryArray::FromArrays(type, std::move(indices), std::move(dictionary)); } -Status ScalarFromJSONString(const std::shared_ptr<DataType>& type, - std::string_view json_string, std::shared_ptr<Scalar>* out) { +Result<std::shared_ptr<Scalar>> ScalarFromJSONString( + const std::shared_ptr<DataType>& type, std::string_view json_string) { std::shared_ptr<Converter> converter; RETURN_NOT_OK(GetConverter(type, &converter)); @@ -1052,13 +1047,12 @@ Status ScalarFromJSONString(const std::shared_ptr<DataType>& type, RETURN_NOT_OK(converter->AppendValue(json_doc)); RETURN_NOT_OK(converter->Finish(&array)); DCHECK_EQ(array->length(), 1); - return array->GetScalar(0).Value(out); + return array->GetScalar(0); } -Status DictScalarFromJSONString(const std::shared_ptr<DataType>& type, - std::string_view index_json, - std::string_view dictionary_json, - std::shared_ptr<Scalar>* out) { +Result<std::shared_ptr<Scalar>> DictScalarFromJSONString( + const std::shared_ptr<DataType>& type, std::string_view index_json, + std::string_view dictionary_json) { if (type->id() != Type::DICTIONARY) { return Status::TypeError("DictScalarFromJSONString requires dictionary type, got ", *type); @@ -1066,14 +1060,13 @@ Status DictScalarFromJSONString(const std::shared_ptr<DataType>& type, const auto& dictionary_type = checked_cast<const DictionaryType&>(*type); - std::shared_ptr<Scalar> index; std::shared_ptr<Array> dictionary; - RETURN_NOT_OK(ScalarFromJSONString(dictionary_type.index_type(), index_json, &index)); + ARROW_ASSIGN_OR_RAISE(auto index, + ScalarFromJSONString(dictionary_type.index_type(), index_json)); ARROW_ASSIGN_OR_RAISE( dictionary, ArrayFromJSONString(dictionary_type.value_type(), dictionary_json)); - *out = DictionaryScalar::Make(std::move(index), std::move(dictionary)); - return Status::OK(); + return DictionaryScalar::Make(std::move(index), std::move(dictionary)); } } // namespace json
diff --git a/cpp/src/arrow/json/from_string.h b/cpp/src/arrow/json/from_string.h index 03c6b1bcdf4..bd5ed3d46a3 100644 --- a/cpp/src/arrow/json/from_string.h +++ b/cpp/src/arrow/json/from_string.h @@ -47,9 +47,8 @@ namespace json { /// \brief Create an Array from a JSON string /// /// \code {.cpp} -/// std::shared_ptr<Array> array = ArrayFromJSONString( -/// int64(), "[2, 3, null, 7, 11]" -/// ).ValueOrDie(); +/// Result<std::shared_ptr<Array>> maybe_array = +/// ArrayFromJSONString(int64(), "[2, 3, null, 7, 11]"); /// \endcode ARROW_EXPORT Result<std::shared_ptr<Array>> ArrayFromJSONString(const std::shared_ptr<DataType>&, @@ -68,52 +67,44 @@ Result<std::shared_ptr<Array>> ArrayFromJSONString(const std::shared_ptr /// \brief Create a ChunkedArray from a JSON string /// /// \code {.cpp} -/// std::shared_ptr<ChunkedArray> chunked_array; -/// ChunkedArrayFromJSONString( -/// int64(), {R"([5, 10])", R"([null])", R"([16])"}, &chunked_array -/// ); +/// Result<std::shared_ptr<ChunkedArray>> maybe_chunked_array = +/// ChunkedArrayFromJSONString(int64(), {R"([5, 10])", R"([null])", R"([16])"});
/// \endcode ARROW_EXPORT -Status ChunkedArrayFromJSONString(const std::shared_ptr<DataType>& type, - const std::vector<std::string>& json_strings, - std::shared_ptr<ChunkedArray>* out); +Result<std::shared_ptr<ChunkedArray>> ChunkedArrayFromJSONString( + const std::shared_ptr<DataType>& type, const std::vector<std::string>& json_strings); /// \brief Create a DictionaryArray from a JSON string /// /// \code {.cpp} -/// std::shared_ptr<Array> array; -/// DictArrayFromJSONString( -/// dictionary(int32(), utf8()), -/// "[0, 1, 0, 2, 0, 3]", R"(["k1", "k2", "k3", "k4"])", -/// &array -/// ); +/// Result<std::shared_ptr<Array>> maybe_dict_array = +/// DictArrayFromJSONString(dictionary(int32(), utf8()), "[0, 1, 0, 2, 0, 3]", +/// R"(["k1", "k2", "k3", "k4"])"); /// \endcode ARROW_EXPORT -Status DictArrayFromJSONString(const std::shared_ptr<DataType>&, - std::string_view indices_json, - std::string_view dictionary_json, - std::shared_ptr<Array>* out); +Result<std::shared_ptr<Array>> DictArrayFromJSONString(const std::shared_ptr<DataType>&, + std::string_view indices_json, + std::string_view dictionary_json); /// \brief Create a Scalar from a JSON string /// \code {.cpp} -/// std::shared_ptr<Scalar> scalar; -/// ScalarFromJSONString(float64(), "42", &scalar); +/// Result<std::shared_ptr<Scalar>> maybe_scalar = +/// ScalarFromJSONString(float64(), "42"); /// \endcode ARROW_EXPORT -Status ScalarFromJSONString(const std::shared_ptr<DataType>&, std::string_view json, - std::shared_ptr<Scalar>* out); +Result<std::shared_ptr<Scalar>> ScalarFromJSONString(const std::shared_ptr<DataType>&, + std::string_view json); /// \brief Create a DictionaryScalar from a JSON string /// \code {.cpp} -/// std::shared_ptr<Scalar> scalar; -/// DictScalarFromJSONString(dictionary(int32(), utf8()), "3", R"(["k1", "k2", "k3", -/// "k4"])", &scalar); +/// Result<std::shared_ptr<Scalar>> maybe_dict_scalar = +/// DictScalarFromJSONString(dictionary(int32(), utf8()), "3", R"(["k1", "k2", "k3", +/// "k4"])"); /// \endcode ARROW_EXPORT -Status DictScalarFromJSONString(const std::shared_ptr<DataType>&, - std::string_view index_json, - std::string_view dictionary_json, - std::shared_ptr<Scalar>* out); +Result<std::shared_ptr<Scalar>> DictScalarFromJSONString( + const std::shared_ptr<DataType>&, std::string_view index_json, + std::string_view dictionary_json); /// @}
diff --git a/cpp/src/arrow/json/from_string_test.cc b/cpp/src/arrow/json/from_string_test.cc index d9fa53f68cb..b70501b5f34 100644 --- a/cpp/src/arrow/json/from_string_test.cc +++ b/cpp/src/arrow/json/from_string_test.cc @@ -149,9 +149,9 @@ template <typename T, typename C_TYPE = typename T::c_type> void AssertJSONScalar(const std::shared_ptr<DataType>& type, const std::string& json, const bool is_valid, const C_TYPE value) { SCOPED_TRACE(json); - std::shared_ptr<Scalar> actual, expected; + std::shared_ptr<Scalar> expected; - ASSERT_OK(ScalarFromJSONString(type, json, &actual)); + ASSERT_OK_AND_ASSIGN(auto actual, ScalarFromJSONString(type, json)); if (is_valid) { ASSERT_OK_AND_ASSIGN(expected, MakeScalar(type, value)); } else { @@ -1471,35 +1471,33 @@ TEST(TestDictArrayFromJSON, Basics) { TEST(TestDictArrayFromJSON, Errors) { auto type = dictionary(int32(), utf8()); - std::shared_ptr<Array> array; - ASSERT_RAISES(Invalid, DictArrayFromJSONString(type, "[\"not a valid index\"]", - "[\"\"]", &array)); - ASSERT_RAISES(Invalid, DictArrayFromJSONString(type, "[0, 1]", "[1]", - &array)); // dict value isn't string + ASSERT_RAISES(Invalid, + DictArrayFromJSONString(type, "[\"not a valid index\"]", "[\"\"]")); + ASSERT_RAISES(Invalid, DictArrayFromJSONString(type, "[0, 1]", + "[1]")); // dict value isn't string } TEST(TestChunkedArrayFromJSON, Basics) { auto type = int32(); - std::shared_ptr<ChunkedArray> chunked_array; - ASSERT_OK(ChunkedArrayFromJSONString(type, {}, &chunked_array)); + ASSERT_OK_AND_ASSIGN(auto chunked_array, ChunkedArrayFromJSONString(type, {}));
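// A caller-side sketch of the migration this commit performs (illustrative
// only; the enclosing function is hypothetical and assumed to return
// arrow::Status, while the helper names are the ones declared above):
//
//   // Before this commit: out-parameter style
//   std::shared_ptr<arrow::ChunkedArray> chunked;
//   ARROW_RETURN_NOT_OK(arrow::json::ChunkedArrayFromJSONString(
//       arrow::int32(), {"[1, 2]", "[null]"}, &chunked));
//
//   // After this commit: Result style
//   ARROW_ASSIGN_OR_RAISE(
//       auto chunked, arrow::json::ChunkedArrayFromJSONString(
//                         arrow::int32(), {"[1, 2]", "[null]"}));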
ASSERT_OK(chunked_array->ValidateFull()); ASSERT_EQ(chunked_array->num_chunks(), 0); AssertTypeEqual(type, chunked_array->type()); - ASSERT_OK(ChunkedArrayFromJSONString(type, {"[1, 2]", "[3, null, 4]"}, &chunked_array)); - ASSERT_OK(chunked_array->ValidateFull()); - ASSERT_EQ(chunked_array->num_chunks(), 2); + ASSERT_OK_AND_ASSIGN(auto chunked_array_two, + ChunkedArrayFromJSONString(type, {"[1, 2]", "[3, null, 4]"})); + ASSERT_OK(chunked_array_two->ValidateFull()); + ASSERT_EQ(chunked_array_two->num_chunks(), 2); std::shared_ptr expected_chunk; ASSERT_OK_AND_ASSIGN(expected_chunk, ArrayFromJSONString(type, "[1, 2]")); - AssertArraysEqual(*expected_chunk, *chunked_array->chunk(0), /*verbose=*/true); + AssertArraysEqual(*expected_chunk, *chunked_array_two->chunk(0), /*verbose=*/true); ASSERT_OK_AND_ASSIGN(expected_chunk, ArrayFromJSONString(type, "[3, null, 4]")); - AssertArraysEqual(*expected_chunk, *chunked_array->chunk(1), /*verbose=*/true); + AssertArraysEqual(*expected_chunk, *chunked_array_two->chunk(1), /*verbose=*/true); } TEST(TestScalarFromJSON, Basics) { // Sanity check for common types (not exhaustive) - std::shared_ptr scalar; AssertJSONScalar(int64(), "4", true, 4); AssertJSONScalar(int64(), "null", false, 0); AssertJSONScalar>(utf8(), R"("")", true, @@ -1516,25 +1514,22 @@ TEST(TestScalarFromJSON, Basics) { AssertJSONScalar(boolean(), "1", true, true); AssertJSONScalar(float64(), "1.0", true, 1.0); AssertJSONScalar(float64(), "-0.0", true, -0.0); - ASSERT_OK(ScalarFromJSONString(float64(), "NaN", &scalar)); - ASSERT_TRUE(std::isnan(checked_cast(*scalar).value)); - ASSERT_OK(ScalarFromJSONString(float64(), "Inf", &scalar)); - ASSERT_TRUE(std::isinf(checked_cast(*scalar).value)); + ASSERT_OK_AND_ASSIGN(auto nan_scalar, ScalarFromJSONString(float64(), "NaN")); + ASSERT_TRUE(std::isnan(checked_cast(*nan_scalar).value)); + ASSERT_OK_AND_ASSIGN(auto inf_scalar, ScalarFromJSONString(float64(), "Inf")); + ASSERT_TRUE(std::isinf(checked_cast(*inf_scalar).value)); } TEST(TestScalarFromJSON, Errors) { - std::shared_ptr scalar; - ASSERT_RAISES(Invalid, ScalarFromJSONString(int64(), "[0]", &scalar)); - ASSERT_RAISES(Invalid, ScalarFromJSONString(int64(), "[9223372036854775808]", &scalar)); - ASSERT_RAISES(Invalid, - ScalarFromJSONString(int64(), "[-9223372036854775809]", &scalar)); - ASSERT_RAISES(Invalid, - ScalarFromJSONString(uint64(), "[18446744073709551616]", &scalar)); - ASSERT_RAISES(Invalid, ScalarFromJSONString(uint64(), "[-1]", &scalar)); - ASSERT_RAISES(Invalid, ScalarFromJSONString(binary(), "0", &scalar)); - ASSERT_RAISES(Invalid, ScalarFromJSONString(binary(), "[]", &scalar)); - ASSERT_RAISES(Invalid, ScalarFromJSONString(boolean(), "0.0", &scalar)); - ASSERT_RAISES(Invalid, ScalarFromJSONString(boolean(), "\"true\"", &scalar)); + ASSERT_RAISES(Invalid, ScalarFromJSONString(int64(), "[0]")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(int64(), "[9223372036854775808]")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(int64(), "[-9223372036854775809]")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(uint64(), "[18446744073709551616]")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(uint64(), "[-1]")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(binary(), "0")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(binary(), "[]")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(boolean(), "0.0")); + ASSERT_RAISES(Invalid, ScalarFromJSONString(boolean(), "\"true\"")); } TEST(TestDictScalarFromJSONString, Basics) { @@ -1553,12 +1548,11 @@ TEST(TestDictScalarFromJSONString, Basics) { 
TEST(TestDictScalarFromJSONString, Errors) { auto type = dictionary(int32(), utf8()); - std::shared_ptr scalar; - ASSERT_RAISES(Invalid, DictScalarFromJSONString(type, "\"not a valid index\"", "[\"\"]", - &scalar)); - ASSERT_RAISES(Invalid, DictScalarFromJSONString(type, "0", "[1]", - &scalar)); // dict value isn't string + ASSERT_RAISES(Invalid, + DictScalarFromJSONString(type, "\"not a valid index\"", "[\"\"]")); + ASSERT_RAISES(Invalid, + DictScalarFromJSONString(type, "0", "[1]")); // dict value isn't string } } // namespace json diff --git a/cpp/src/arrow/testing/gtest_util.cc b/cpp/src/arrow/testing/gtest_util.cc index c49106757e4..1acc47a99d4 100644 --- a/cpp/src/arrow/testing/gtest_util.cc +++ b/cpp/src/arrow/testing/gtest_util.cc @@ -387,15 +387,14 @@ std::shared_ptr ArrayFromJSON(const std::shared_ptr& type, std::shared_ptr DictArrayFromJSON(const std::shared_ptr& type, std::string_view indices_json, std::string_view dictionary_json) { - std::shared_ptr out; - ABORT_NOT_OK(json::DictArrayFromJSONString(type, indices_json, dictionary_json, &out)); + EXPECT_OK_AND_ASSIGN( + auto out, json::DictArrayFromJSONString(type, indices_json, dictionary_json)); return out; } std::shared_ptr ChunkedArrayFromJSON(const std::shared_ptr& type, const std::vector& json) { - std::shared_ptr out; - ABORT_NOT_OK(json::ChunkedArrayFromJSONString(type, json, &out)); + EXPECT_OK_AND_ASSIGN(auto out, json::ChunkedArrayFromJSONString(type, json)); return out; } @@ -411,16 +410,15 @@ std::shared_ptr RecordBatchFromJSON(const std::shared_ptr& std::shared_ptr ScalarFromJSON(const std::shared_ptr& type, std::string_view json) { - std::shared_ptr out; - ABORT_NOT_OK(json::ScalarFromJSONString(type, json, &out)); + EXPECT_OK_AND_ASSIGN(auto out, json::ScalarFromJSONString(type, json)); return out; } std::shared_ptr DictScalarFromJSON(const std::shared_ptr& type, std::string_view index_json, std::string_view dictionary_json) { - std::shared_ptr out; - ABORT_NOT_OK(json::DictScalarFromJSONString(type, index_json, dictionary_json, &out)); + EXPECT_OK_AND_ASSIGN(auto out, + json::DictScalarFromJSONString(type, index_json, dictionary_json)); return out; } diff --git a/python/pyarrow/src/arrow/python/gdb.cc b/python/pyarrow/src/arrow/python/gdb.cc index 38383b86f49..2a7d2eda4bf 100644 --- a/python/pyarrow/src/arrow/python/gdb.cc +++ b/python/pyarrow/src/arrow/python/gdb.cc @@ -363,9 +363,8 @@ void TestSession() { ExtensionScalar extension_scalar_null{extension_scalar.value, extension_scalar_type, /*is_valid=*/false}; - std::shared_ptr heap_map_scalar; - ARROW_CHECK_OK(ScalarFromJSONString(map(utf8(), int32()), R"([["a", 5], ["b", 6]])", - &heap_map_scalar)); + auto heap_map_scalar = + *ScalarFromJSONString(map(utf8(), int32()), R"([["a", 5], ["b", 6]])"); auto heap_map_scalar_null = MakeNullScalar(heap_map_scalar->type); // Array and ArrayData @@ -479,13 +478,10 @@ void TestSession() { key_value_metadata({"key1", "key2", "key3"}, {"value1", "value2", "value3"})); // Table - ChunkedArrayVector table_columns{2}; - ARROW_CHECK_OK( - ChunkedArrayFromJSONString(int32(), {"[1, 2, 3]", "[4, 5]"}, &table_columns[0])); - ARROW_CHECK_OK(ChunkedArrayFromJSONString( - utf8(), {R"(["abc", null])", R"(["def"])", R"(["ghi", "jkl"])"}, - &table_columns[1])); - auto table = Table::Make(batch_schema, table_columns); + auto col1 = ChunkedArrayFromJSONString(int32(), {"[1, 2, 3]", "[4, 5]"}); + auto col2 = ChunkedArrayFromJSONString( + utf8(), {R"(["abc", null])", R"(["def"])", R"(["ghi", "jkl"])"}); + auto table = 
Table::Make(batch_schema, {*col1, *col2}); // Datum Datum empty_datum{}; From 0fbb9c53d76ae8f6a887817005a999af0e348238 Mon Sep 17 00:00:00 2001 From: Hiroyuki Sato Date: Thu, 5 Jun 2025 09:26:14 +0900 Subject: [PATCH 20/63] GH-46699: [CI][Dev] fix shellcheck errors in the ci/scripts/cpp_test.sh (#46700) ### Rationale for this change `ci/scripts/cpp_test.sh` violates two shellcheck rules. * SC2071: `< is for string comparisons. Use -lt instead.` * SC2086: `Double quote to prevent globbing and word splitting.` ``` ./ci/scripts/cpp_test.sh In ./ci/scripts/cpp_test.sh line 22: if [[ $# < 2 ]]; then ^-- SC2071 (error): < is for string comparisons. Use -lt instead. In ./ci/scripts/cpp_test.sh line 87: pushd ${build_dir} ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: pushd "${build_dir}" In ./ci/scripts/cpp_test.sh line 103: --parallel ${n_jobs} \ ^-------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: --parallel "${n_jobs}" \ In ./ci/scripts/cpp_test.sh line 105: --timeout ${ARROW_CTEST_TIMEOUT:-300} \ ^-------------------------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: --timeout "${ARROW_CTEST_TIMEOUT:-300}" \ In ./ci/scripts/cpp_test.sh line 111: examples=$(find ${binary_output_dir} -executable -name "*example") ^------------------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: examples=$(find "${binary_output_dir}" -executable -name "*example") In ./ci/scripts/cpp_test.sh line 129: ${binary_output_dir}/arrow-ipc-stream-fuzz ${ARROW_TEST_DATA}/arrow-ipc-stream/crash-* ^------------------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: "${binary_output_dir}"/arrow-ipc-stream-fuzz "${ARROW_TEST_DATA}"/arrow-ipc-stream/crash-* In ./ci/scripts/cpp_test.sh line 130: ${binary_output_dir}/arrow-ipc-stream-fuzz ${ARROW_TEST_DATA}/arrow-ipc-stream/*-testcase-* ^------------------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: "${binary_output_dir}"/arrow-ipc-stream-fuzz "${ARROW_TEST_DATA}"/arrow-ipc-stream/*-testcase-* In ./ci/scripts/cpp_test.sh line 131: ${binary_output_dir}/arrow-ipc-file-fuzz ${ARROW_TEST_DATA}/arrow-ipc-file/*-testcase-* ^------------------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: "${binary_output_dir}"/arrow-ipc-file-fuzz "${ARROW_TEST_DATA}"/arrow-ipc-file/*-testcase-* In ./ci/scripts/cpp_test.sh line 132: ${binary_output_dir}/arrow-ipc-tensor-stream-fuzz ${ARROW_TEST_DATA}/arrow-ipc-tensor-stream/*-testcase-* ^------------------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: "${binary_output_dir}"/arrow-ipc-tensor-stream-fuzz "${ARROW_TEST_DATA}"/arrow-ipc-tensor-stream/*-testcase-* In ./ci/scripts/cpp_test.sh line 134: ${binary_output_dir}/parquet-arrow-fuzz ${ARROW_TEST_DATA}/parquet/fuzzing/*-testcase-* ^------------------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------------^ SC2086 (info): Double quote to prevent globbing and word splitting. 
Did you mean: "${binary_output_dir}"/parquet-arrow-fuzz "${ARROW_TEST_DATA}"/parquet/fuzzing/*-testcase-* For more information: https://www.shellcheck.net/wiki/SC2071 -- < is for string comparisons. Use ... https://www.shellcheck.net/wiki/SC2086 -- Double quote to prevent globbing ... ``` ### What changes are included in this PR? * Use `-lt` instead of `<` * Quote variables. ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46699 Authored-by: Hiroyuki Sato Signed-off-by: Sutou Kouhei --- .pre-commit-config.yaml | 1 + ci/scripts/cpp_test.sh | 20 ++++++++++---------- 2 files changed, 11 insertions(+), 10 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 1090ec990cf..d70fe523ca9 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -300,6 +300,7 @@ repos: ?^ci/scripts/c_glib_build\.sh$| ?^ci/scripts/c_glib_test\.sh$| ?^ci/scripts/conan_setup\.sh$| + ?^ci/scripts/cpp_test\.sh$| ?^ci/scripts/csharp_build\.sh$| ?^ci/scripts/csharp_pack\.sh$| ?^ci/scripts/download_tz_database\.sh$| diff --git a/ci/scripts/cpp_test.sh b/ci/scripts/cpp_test.sh index e646ba964a5..60d9dd0a3b7 100755 --- a/ci/scripts/cpp_test.sh +++ b/ci/scripts/cpp_test.sh @@ -19,7 +19,7 @@ set -ex -if [[ $# < 2 ]]; then +if [[ $# -lt 2 ]]; then echo "Usage: $0 [ctest args ...]" exit 1 fi @@ -84,7 +84,7 @@ if [ "${ARROW_EMSCRIPTEN:-OFF}" = "ON" ]; then n_jobs=1 # avoid spurious fails on emscripten due to loading too many big executables fi -pushd ${build_dir} +pushd "${build_dir}" if [ -z "${PYTHON}" ] && ! which python > /dev/null 2>&1; then export PYTHON="${PYTHON:-python3}" @@ -100,15 +100,15 @@ else ctest \ --label-regex unittest \ --output-on-failure \ - --parallel ${n_jobs} \ + --parallel "${n_jobs}" \ --repeat until-pass:3 \ - --timeout ${ARROW_CTEST_TIMEOUT:-300} \ + --timeout "${ARROW_CTEST_TIMEOUT:-300}" \ "${ctest_options[@]}" \ "$@" fi if [ "${ARROW_BUILD_EXAMPLES}" == "ON" ]; then - examples=$(find ${binary_output_dir} -executable -name "*example") + examples=$(find "${binary_output_dir}" -executable -name "*example") if [ "${examples}" == "" ]; then echo "==================" echo "No examples found!" 
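# An illustrative snippet (not part of the patch) showing what the two classes
# of shellcheck fixes above guard against:
#
#   n=10
#   [[ ${n} < 9 ]] && echo "fires"    # SC2071: string comparison; "10" sorts
#                                     # before "9", so this wrongly succeeds
#   [[ ${n} -lt 9 ]] && echo "fires"  # numeric comparison; correctly false
#
#   build_dir="my build"
#   pushd ${build_dir}                # SC2086: word-splits into two arguments
#   pushd "${build_dir}"              # quoted: one argument, as intended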
@@ -126,12 +126,12 @@ fi if [ "${ARROW_FUZZING}" == "ON" ]; then # Fuzzing regression tests - ${binary_output_dir}/arrow-ipc-stream-fuzz ${ARROW_TEST_DATA}/arrow-ipc-stream/crash-* - ${binary_output_dir}/arrow-ipc-stream-fuzz ${ARROW_TEST_DATA}/arrow-ipc-stream/*-testcase-* - ${binary_output_dir}/arrow-ipc-file-fuzz ${ARROW_TEST_DATA}/arrow-ipc-file/*-testcase-* - ${binary_output_dir}/arrow-ipc-tensor-stream-fuzz ${ARROW_TEST_DATA}/arrow-ipc-tensor-stream/*-testcase-* + "${binary_output_dir}/arrow-ipc-stream-fuzz" "${ARROW_TEST_DATA}"/arrow-ipc-stream/crash-* + "${binary_output_dir}/arrow-ipc-stream-fuzz" "${ARROW_TEST_DATA}"/arrow-ipc-stream/*-testcase-* + "${binary_output_dir}/arrow-ipc-file-fuzz" "${ARROW_TEST_DATA}"/arrow-ipc-file/*-testcase-* + "${binary_output_dir}/arrow-ipc-tensor-stream-fuzz" "${ARROW_TEST_DATA}"/arrow-ipc-tensor-stream/*-testcase-* if [ "${ARROW_PARQUET}" == "ON" ]; then - ${binary_output_dir}/parquet-arrow-fuzz ${ARROW_TEST_DATA}/parquet/fuzzing/*-testcase-* + "${binary_output_dir}/parquet-arrow-fuzz" "${ARROW_TEST_DATA}"/parquet/fuzzing/*-testcase-* fi fi From 8697ca5e54de648e9722fb0c24f0bea85cc355d6 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Cumplido?= Date: Thu, 5 Jun 2025 11:06:05 +0200 Subject: [PATCH 21/63] GH-46691: [CI][Packaging] Update platform tag on generated wheel name to match newest auditwheel naming (#46705) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Rationale for this change The new version of auditwheel has added improvements to libc / platform detection on the wheels: - https://github.com/pypa/auditwheel/pull/548 This has changed the ordering of the platform tags for some of the generated wheels: in our case, the manylinux2014 (libc 2.17) wheels, but only for Python 3.13 amd64 and 3.13t arm64. The rest still use the old order. ### What changes are included in this PR? Force the newest auditwheel version and update to the new order of platform tags. ### Are these changes tested? Via archery. ### Are there any user-facing changes?
No * GitHub Issue: #46691 Authored-by: Raúl Cumplido Signed-off-by: Sutou Kouhei --- ci/docker/python-wheel-manylinux.dockerfile | 4 +++- dev/release/verify-release-candidate.sh | 2 +- dev/tasks/tasks.yml | 4 ++-- 3 files changed, 6 insertions(+), 4 deletions(-) diff --git a/ci/docker/python-wheel-manylinux.dockerfile b/ci/docker/python-wheel-manylinux.dockerfile index 789f1531dd7..fff354e032b 100644 --- a/ci/docker/python-wheel-manylinux.dockerfile +++ b/ci/docker/python-wheel-manylinux.dockerfile @@ -114,7 +114,9 @@ RUN --mount=type=secret,id=github_repository_owner \ rm -rf ~/.config/NuGet/ # Make sure auditwheel is up-to-date -RUN pipx upgrade auditwheel +# Force upgrade version to 6.4.0 or later to ensure platform tags order is correct +# See https://github.com/apache/arrow/pull/46705 +RUN pipx upgrade auditwheel>=6.4.0 # Configure Python for applications running in the bash shell of this Dockerfile ARG python=3.9 diff --git a/dev/release/verify-release-candidate.sh b/dev/release/verify-release-candidate.sh index 98652c52e1f..484561b9d67 100755 --- a/dev/release/verify-release-candidate.sh +++ b/dev/release/verify-release-candidate.sh @@ -882,7 +882,7 @@ test_linux_wheels() { fi local python_versions="${TEST_PYTHON_VERSIONS:-3.9 3.10 3.11 3.12 3.13}" - local platform_tags="${TEST_WHEEL_PLATFORM_TAGS:-manylinux_2_17_${arch}.manylinux2014_${arch} manylinux_2_28_${arch}}" + local platform_tags="${TEST_WHEEL_PLATFORM_TAGS:-manylinux2014_${arch}.manylinux_2_17_${arch} manylinux_2_28_${arch}}" if [ "${SOURCE_KIND}" != "local" ]; then local wheel_content="OFF" diff --git a/dev/tasks/tasks.yml b/dev/tasks/tasks.yml index 0bb4ab4acf6..04d67abfeee 100644 --- a/dev/tasks/tasks.yml +++ b/dev/tasks/tasks.yml @@ -223,9 +223,9 @@ tasks: {############################## Wheel Linux ##################################} -{% for wheel_kind, arch, version, platform_tag in [("manylinux", "amd64", "2014", "manylinux_2_17_x86_64.manylinux2014_x86_64"), +{% for wheel_kind, arch, version, platform_tag in [("manylinux", "amd64", "2014", "manylinux2014_x86_64.manylinux_2_17_x86_64"), ("manylinux", "amd64", "2-28", "manylinux_2_28_x86_64"), - ("manylinux", "arm64", "2014", "manylinux_2_17_aarch64.manylinux2014_aarch64"), + ("manylinux", "arm64", "2014", "manylinux2014_aarch64.manylinux_2_17_aarch64"), ("manylinux", "arm64", "2-28", "manylinux_2_28_aarch64"), ("musllinux", "amd64", "1-2", "musllinux_1_2_x86_64"), ("musllinux", "arm64", "1-2", "musllinux_1_2_aarch64")] %} From 38c2e14cdeb762c51490f74c892b7895cde3b719 Mon Sep 17 00:00:00 2001 From: Nic Crane Date: Thu, 5 Jun 2025 15:50:28 +0100 Subject: [PATCH 22/63] MINOR: Add language-specific prompt to kapa.ai bot (#46637) ### Rationale for this change Asks users to specify language when using the kapa.ai bot ### What changes are included in this PR? Update user instructions ### Are these changes tested? Nah but I'll build the docs here to look ### Are there any user-facing changes? 
Yeah, the instructions Authored-by: Nic Crane Signed-off-by: Nic Crane --- docs/source/_templates/kapa-ai-bot.html | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/docs/source/_templates/kapa-ai-bot.html b/docs/source/_templates/kapa-ai-bot.html index a97c844589e..8eaede91c67 100644 --- a/docs/source/_templates/kapa-ai-bot.html +++ b/docs/source/_templates/kapa-ai-bot.html @@ -6,7 +6,7 @@ data-project-name="Apache Arrow" data-project-color="#000000" data-project-logo="https://arrow.apache.org/img/arrow-logo_chevrons_white-txt_black-bg.png" - data-modal-disclaimer="This is a custom LLM with access to all [Arrow documentation](https://arrow.apache.org/docs/)." + data-modal-disclaimer="This is a custom LLM with access to all [Arrow documentation](https://arrow.apache.org/docs/). Please include the language you are using in your question, e.g., Python, C++, Java, R, etc." data-consent-required="true" data-consent-screen-disclaimer="By clicking "I agree, let's chat", you consent to the use of the AI assistant in accordance with kapa.ai's [Privacy Policy](https://www.kapa.ai/content/privacy-policy). This service uses reCAPTCHA, which requires your consent to Google's [Privacy Policy](https://policies.google.com/privacy) and [Terms of Service](https://policies.google.com/terms). By proceeding, you explicitly agree to both kapa.ai's and Google's privacy policies." > From 0d4749f2edbfaafc72687f74e78c1d84e1fa24f6 Mon Sep 17 00:00:00 2001 From: Etienne Bacher <52219252+etiennebacher@users.noreply.github.com> Date: Thu, 5 Jun 2025 17:45:30 +0200 Subject: [PATCH 23/63] GH-46636: [R] Fix evaluation of external objects not in global environment in `case_when()` (#46667) ### Rationale for this change When a script is called in an environment that isn't the global environment (for instance with `source("my-script.R", local = new.env())`), `case_when()` would fail to detect external objects used in conditions. This PR fixes this behavior. Fixes #46636 ### What changes are included in this PR? When evaluating expressions in `dplyr` functions, `eval_tidy()` now takes into account `mask` as an environment where it should look for external objects. @thisisnic suggested in #46636 that the bug might be due to https://github.com/apache/arrow/blob/main/r/R/dplyr-funcs-conditional.R#L116 but I couldn't find a way to fix it there. ### Are these changes tested? I added a test for this scenario. I ensured it failed before the change and succeeded after. ### Are there any user-facing changes? There is one user-facing, non-breaking change, illustrated both in the related issue and in the new test. * GitHub Issue: #46636 Authored-by: etiennebacher Signed-off-by: Nic Crane --- r/NEWS.md | 2 ++ r/R/dplyr-eval.R | 2 +- r/tests/testthat/test-dplyr-funcs-conditional.R | 17 +++++++++++++++++ 3 files changed, 20 insertions(+), 1 deletion(-) diff --git a/r/NEWS.md b/r/NEWS.md index 9e5a694fc23..0be0bc8e0d5 100644 --- a/r/NEWS.md +++ b/r/NEWS.md @@ -24,6 +24,8 @@ - Added bindings for atan, sinh, cosh, tanh, asinh, acosh, and tanh, and expm1 (#44953) - Expose an option `check_directory_existence_before_creation` in `S3FileSystem` to reduce I/O calls on cloud storage (@HaochengLIU, #41998) +- `case_when()` now correctly detects objects that are not in the global + environment (@etiennebacher, #46667).
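As background for this NEWS entry, a minimal sketch of the previously failing pattern, mirroring the new test (the table and object names are illustrative):

```r
library(arrow)
library(dplyr)

# Saved as my-script.R and run via source("my-script.R", local = new.env()),
# this used to fail because `pattern` existed only in the sourcing
# environment, not in the global environment where it was looked up.
dat <- arrow_table(x = c("a", "b"))
pattern <- "a"
dat %>%
  mutate(x2 = case_when(x == pattern ~ "foo")) %>%
  collect()
#> x = c("a", "b"), x2 = c("foo", NA)
```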
# arrow 20.0.0.1 diff --git a/r/R/dplyr-eval.R b/r/R/dplyr-eval.R index 2dce24117a3..896b47a7799 100644 --- a/r/R/dplyr-eval.R +++ b/r/R/dplyr-eval.R @@ -30,7 +30,7 @@ arrow_eval <- function(expr, mask) { # regular dplyr may work # * validation_error: the expression is known to be not valid, so don't # recommend retrying with regular dplyr - tryCatch(eval_tidy(expr, mask), error = function(e) { + tryCatch(eval_tidy(expr, mask, env = mask), error = function(e) { # Inspect why the expression failed, and add the expr as the `call` # for better error messages msg <- conditionMessage(e) diff --git a/r/tests/testthat/test-dplyr-funcs-conditional.R b/r/tests/testthat/test-dplyr-funcs-conditional.R index 24ddd342a88..3ff93c0a87f 100644 --- a/r/tests/testthat/test-dplyr-funcs-conditional.R +++ b/r/tests/testthat/test-dplyr-funcs-conditional.R @@ -491,3 +491,20 @@ test_that("coalesce()", { class = "validation_error" ) }) + +test_that("external objects are found when they're not in the global environment, #46636", { + dat <- arrow_table(x = c("a", "b")) + pattern <- "a" + expect_identical( + dat %>% + mutate(x2 = case_when(x == pattern ~ "foo")) %>% + collect(), + tibble(x = c("a", "b"), x2 = c("foo", NA)) + ) + expect_identical( + dat %>% + mutate(x2 = if_else(x == pattern, "foo", NA_character_)) %>% + collect(), + tibble(x = c("a", "b"), x2 = c("foo", NA)) + ) +}) From 88480cc198fe5a2281a9119d8c696b09d2792ff9 Mon Sep 17 00:00:00 2001 From: Nic Crane Date: Fri, 6 Jun 2025 01:06:28 +0100 Subject: [PATCH 24/63] GH-46717: [R][Docs] Add missing "internal" keywords for internal function (#46722) ### Rationale for this change pkgdown generation was failing due to a function not being included in the list of functions to document ### What changes are included in this PR? Update roxygen header to not generate that function or trigger the pkgdown check ### Are these changes tested? Nah, but I'll trigger CI to check ### Are there any user-facing changes? No * GitHub Issue: #46717 Authored-by: Nic Crane Signed-off-by: Sutou Kouhei --- r/R/dplyr-funcs-agg.R | 1 + r/man/one.Rd | 9 +++++---- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/r/R/dplyr-funcs-agg.R b/r/R/dplyr-funcs-agg.R index b67af6fc01c..d1f2a2c3f57 100644 --- a/r/R/dplyr-funcs-agg.R +++ b/r/R/dplyr-funcs-agg.R @@ -204,6 +204,7 @@ find_arrow_mask <- function() { #' group_by(cyl) |> #' summarize(x = one(disp)) #' } +#' @keywords internal one <- function(...) { set_agg( fun = "one", diff --git a/r/man/one.Rd b/r/man/one.Rd index 5b672b5c190..654d7895951 100644 --- a/r/man/one.Rd +++ b/r/man/one.Rd @@ -10,10 +10,10 @@ one(...) \item{...}{Unquoted column name to pull values from.} } \description{ -Returns one arbitrary value from the input for each group. The -function is biased towards non-null values: if there is at least one non-null -value for a certain group, that value is returned, and only if all the values -are null for the group will the function return null. +Returns one arbitrary value from the input for each group. The function is +biased towards non-null values: if there is at least one non-null value for a +certain group, that value is returned, and only if all the values are null +for the group will the function return null. 
} \examples{ \dontrun{ @@ -23,3 +23,4 @@ mtcars |> summarize(x = one(disp)) } } +\keyword{internal} From 9ddaa9f3139b50e61b915e8a1025756304d98e31 Mon Sep 17 00:00:00 2001 From: Bryce Mecum Date: Fri, 6 Jun 2025 11:37:43 -0700 Subject: [PATCH 25/63] GH-46729: [Python] Allow constructing InMemoryDataset from RecordBatchReader (#46731) ### Rationale for this change Our docs say you can construct a Dataset from a RecordBatchReader but you can't. While we can't pass the actual RecordBatchReader to the Dataset as a source (AFAIK), we can at least consume the reader immediately and create an InMemoryDataset from the batches. ### What changes are included in this PR? - Tweaked type checks so this now works (both from ds.dataset and ds.InMemoryDataset) - Test case extended to cover the new behavior - Tweaked error message just to use proper case ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46729 Authored-by: Bryce Mecum Signed-off-by: Bryce Mecum --- python/pyarrow/_dataset.pyx | 6 +++--- python/pyarrow/dataset.py | 2 +- python/pyarrow/tests/test_dataset.py | 3 ++- 3 files changed, 6 insertions(+), 5 deletions(-) diff --git a/python/pyarrow/_dataset.pyx b/python/pyarrow/_dataset.pyx index 9e5edee5742..478c6b3f7c1 100644 --- a/python/pyarrow/_dataset.pyx +++ b/python/pyarrow/_dataset.pyx @@ -1011,7 +1011,7 @@ cdef class InMemoryDataset(Dataset): if isinstance(source, (pa.RecordBatch, pa.Table)): source = [source] - if isinstance(source, (list, tuple)): + if isinstance(source, (list, tuple, pa.RecordBatchReader)): batches = [] for item in source: if isinstance(item, pa.RecordBatch): @@ -1036,8 +1036,8 @@ cdef class InMemoryDataset(Dataset): pyarrow_unwrap_table(table)) else: raise TypeError( - 'Expected a table, batch, or list of tables/batches ' - 'instead of the given type: ' + + 'Expected a Table, RecordBatch, list of Table/RecordBatch, ' + 'or RecordBatchReader instead of the given type: ' + type(source).__name__ ) diff --git a/python/pyarrow/dataset.py b/python/pyarrow/dataset.py index 26602c1e175..ef4f7288723 100644 --- a/python/pyarrow/dataset.py +++ b/python/pyarrow/dataset.py @@ -804,7 +804,7 @@ def dataset(source, schema=None, format=None, filesystem=None, 'of batches or tables. 
The given list contains the following ' f'types: {type_names}' ) - elif isinstance(source, (pa.RecordBatch, pa.Table)): + elif isinstance(source, (pa.RecordBatch, pa.Table, pa.RecordBatchReader)): return _in_memory_dataset(source, **kwargs) else: raise TypeError( diff --git a/python/pyarrow/tests/test_dataset.py b/python/pyarrow/tests/test_dataset.py index 4af0f914eb6..c17e038713a 100644 --- a/python/pyarrow/tests/test_dataset.py +++ b/python/pyarrow/tests/test_dataset.py @@ -2558,13 +2558,14 @@ def test_construct_from_invalid_sources_raise(multisourcefs): def test_construct_in_memory(dataset_reader): batch = pa.RecordBatch.from_arrays([pa.array(range(10))], names=["a"]) + reader = pa.RecordBatchReader.from_batches(batch.schema, [batch]) table = pa.Table.from_batches([batch]) dataset_table = ds.dataset([], format='ipc', schema=pa.schema([]) ).to_table() assert dataset_table == pa.table([]) - for source in (batch, table, [batch], [table]): + for source in (batch, table, [batch], [table], reader): dataset = ds.dataset(source) assert dataset_reader.to_table(dataset) == table assert len(list(dataset.get_fragments())) == 1 From 7eb2c0a36c697f0a64c73da010ce84a47b46567c Mon Sep 17 00:00:00 2001 From: William Ayd Date: Fri, 6 Jun 2025 19:43:37 -0400 Subject: [PATCH 26/63] GH-46141: [C++] Add flight directory to Meson configuration (#46142) ### Rationale for this change Continues building out support for Meson as a build system generator ### What changes are included in this PR? This adds the flight directory to the Meson configuration ### Are these changes tested? Locally ### Are there any user-facing changes? No * GitHub Issue: #46141 Authored-by: Will Ayd Signed-off-by: Sutou Kouhei --- cpp/meson.build | 5 +- cpp/meson.options | 7 ++ cpp/src/arrow/flight/meson.build | 205 +++++++++++++++++++++++++++++++ cpp/src/arrow/meson.build | 4 + cpp/subprojects/abseil-cpp.wrap | 125 +++++++++++++++++++ cpp/subprojects/c-ares.wrap | 30 +++++ cpp/subprojects/grpc.wrap | 34 +++++ cpp/subprojects/protobuf.wrap | 33 +++++ 8 files changed, 441 insertions(+), 2 deletions(-) create mode 100644 cpp/src/arrow/flight/meson.build create mode 100644 cpp/subprojects/abseil-cpp.wrap create mode 100644 cpp/subprojects/c-ares.wrap create mode 100644 cpp/subprojects/grpc.wrap create mode 100644 cpp/subprojects/protobuf.wrap diff --git a/cpp/meson.build b/cpp/meson.build index f4d006d31f4..247f3154b96 100644 --- a/cpp/meson.build +++ b/cpp/meson.build @@ -24,7 +24,7 @@ project( meson_version: '>=1.3.0', default_options: [ 'buildtype=release', - 'c_std=c99', + 'c_std=gnu11,c11', 'warning_level=2', 'cpp_std=c++17', ], @@ -67,7 +67,8 @@ needs_filesystem = get_option('filesystem').enabled() or needs_azure or needs_gc needs_integration = get_option('integration').enabled() needs_tests = get_option('tests').enabled() needs_acero = get_option('acero').enabled() -needs_ipc = get_option('ipc').enabled() or needs_tests or needs_acero or needs_benchmarks +needs_flight = get_option('flight').enabled() +needs_ipc = get_option('ipc').enabled() or needs_tests or needs_acero or needs_benchmarks or needs_flight needs_fuzzing = get_option('fuzzing').enabled() needs_testing = (get_option('testing').enabled() or needs_tests diff --git a/cpp/meson.options b/cpp/meson.options index b51d20c8d5b..fed38a29700 100644 --- a/cpp/meson.options +++ b/cpp/meson.options @@ -52,6 +52,13 @@ option( ) option('json', type: 'feature', description: 'Build Arrow with JSON support') + +option( + 'flight', + type: 'feature', + description: 'Build the Arrow Flight 
RPC System (requires GRPC, Protocol Buffers)', +) + option('git_id', type: 'string') option('git_description', type: 'string') diff --git a/cpp/src/arrow/flight/meson.build b/cpp/src/arrow/flight/meson.build new file mode 100644 index 00000000000..85df8e61cb6 --- /dev/null +++ b/cpp/src/arrow/flight/meson.build @@ -0,0 +1,205 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +install_headers( + [ + 'api.h', + 'client_auth.h', + 'client_cookie_middleware.h', + 'client.h', + 'client_middleware.h', + 'client_tracing_middleware.h', + 'middleware.h', + 'otel_logging.h', + 'platform.h', + 'server_auth.h', + 'server.h', + 'server_middleware.h', + 'server_tracing_middleware.h', + 'test_auth_handlers.h', + 'test_definitions.h', + 'test_flight_server.h', + 'test_util.h', + 'transport.h', + 'transport_server.h', + 'type_fwd.h', + 'types_async.h', + 'types.h', + 'visibility.h', + ], + subdir: 'arrow/flight', +) + +grpc_dep = dependency('grpc++') +protobuf_dep = dependency('protobuf') +abseil_sync_dep = dependency('absl_synchronization') + +fs = import('fs') +protoc = find_program('protoc') + +flight_proto_path = fs.parent(meson.project_source_root()) / 'format' +flight_proto_files = custom_target( + 'arrow-flight-proto-files', + input: [flight_proto_path / 'Flight.proto'], + output: ['Flight.pb.cc', 'Flight.pb.h'], + command: [ + protoc, + '--proto_path=' + flight_proto_path, + '--cpp_out=' + meson.current_build_dir(), + '@INPUT@', + ], +) + +grpc_cpp_plugin = find_program('grpc_cpp_plugin') +flight_proto_grpc_files = custom_target( + 'arrow-flight-proto-grpc-files', + input: [flight_proto_path / 'Flight.proto'], + output: ['Flight.grpc.pb.cc', 'Flight.grpc.pb.h'], + command: [ + protoc, + '--proto_path=' + flight_proto_path, + '--grpc_out=' + meson.current_build_dir(), + '--plugin=protoc-gen-grpc=' + grpc_cpp_plugin.full_path(), + '@INPUT@', + ], +) + +arrow_flight_srcs = [ + 'client.cc', + 'client_cookie_middleware.cc', + 'client_tracing_middleware.cc', + 'cookie_internal.cc', + 'middleware.cc', + 'serialization_internal.cc', + 'server.cc', + 'server_auth.cc', + 'server_tracing_middleware.cc', + 'transport.cc', + 'transport_server.cc', + 'transport/grpc/grpc_client.cc', + 'transport/grpc/grpc_server.cc', + 'transport/grpc/serialization_internal.cc', + 'transport/grpc/protocol_grpc_internal.cc', + 'transport/grpc/util_internal.cc', + 'types.cc', +] + +thread_dep = dependency('threads') + +arrow_flight = library( + 'arrow-flight', + # We intentionally index flight_proto_grpc_files[1] so as to avoid + # adding 'Flight.grpc.pb.cc' to the sources. 
This is required + # because protocol_grpc_internal.cc includes the source file + # directly; using as a source here will cause a ODR violation + sources: arrow_flight_srcs + [ + flight_proto_files, + flight_proto_grpc_files[1], + ], + dependencies: [ + arrow_dep, + grpc_dep, + protobuf_dep, + abseil_sync_dep, + thread_dep, + ], + cpp_args: '-DARROW_FLIGHT_EXPORTING', +) + +arrow_flight_dep = declare_dependency( + link_with: arrow_flight, + dependencies: [grpc_dep, protobuf_dep, abseil_sync_dep], +) + +if needs_testing + arrow_flight_testing_lib = library( + 'arrow-flight-testing', + sources: [ + 'test_auth_handlers.cc', + 'test_definitions.cc', + 'test_flight_server.cc', + 'test_util.cc', + ], + dependencies: [arrow_test_dep, arrow_flight_dep, thread_dep], + ) + + arrow_flight_test_dep = declare_dependency( + link_with: arrow_flight_testing_lib, + dependencies: [arrow_flight_dep], + ) +else + arrow_flight_test_dep = disabler() +endif + +flight_tests = ['flight_internals_test', 'flight_test'] +foreach flight_test : flight_tests + test_name = '@0@'.format(flight_test.replace('_', '-')) + exc = executable( + test_name, + sources: [ + '@0@.cc'.format(flight_test), + # flight_internals_test.cc transitively includes Flight.grpc.pb.h + # so we must declare that here to avoid a race condition + flight_proto_grpc_files[1], + ], + dependencies: [arrow_test_dep, arrow_flight_test_dep], + ) + test(test_name, exc) +endforeach + +flight_test_dep_no_main = [ + arrow_dep, + arrow_flight_test_dep, + gtest_dep, + gmock_dep, + gflags_dep, +] + +if needs_tests or needs_benchmarks + executable( + 'flight-test-server', + sources: ['test_server.cc'], + dependencies: flight_test_dep_no_main, + ) +endif + +if needs_benchmarks + server_proto_path = meson.project_source_root() / 'src' / 'arrow' / 'flight' + flight_proto_files = custom_target( + 'arrow-flight-benchmark-perf-proto-files', + input: [server_proto_path / 'perf.proto'], + output: ['perf.pb.cc', 'perf.pb.h'], + command: [ + protoc, + '--proto_path=' + meson.current_source_dir(), + '--cpp_out=' + meson.current_build_dir(), + '@INPUT@', + ], + ) + + executable( + 'arrow-flight-perf-server', + sources: ['perf_server.cc'] + flight_proto_files, + dependencies: flight_test_dep_no_main, + ) + + executable( + 'arrow-flight-benchmark', + sources: ['flight_benchmark.cc'] + flight_proto_files, + dependencies: flight_test_dep_no_main, + ) +endif diff --git a/cpp/src/arrow/meson.build b/cpp/src/arrow/meson.build index 018cb2dac54..cad76c39db8 100644 --- a/cpp/src/arrow/meson.build +++ b/cpp/src/arrow/meson.build @@ -720,6 +720,10 @@ if needs_filesystem subdir('filesystem') endif +if needs_flight + subdir('flight') +endif + if needs_json subdir('json') endif diff --git a/cpp/subprojects/abseil-cpp.wrap b/cpp/subprojects/abseil-cpp.wrap new file mode 100644 index 00000000000..54fc2280c7c --- /dev/null +++ b/cpp/subprojects/abseil-cpp.wrap @@ -0,0 +1,125 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[wrap-file] +directory = abseil-cpp-20240722.0 +source_url = https://github.com/abseil/abseil-cpp/releases/download/20240722.0/abseil-cpp-20240722.0.tar.gz +source_filename = abseil-cpp-20240722.0.tar.gz +source_hash = f50e5ac311a81382da7fa75b97310e4b9006474f9560ac46f54a9967f07d4ae3 +patch_filename = abseil-cpp_20240722.0-3_patch.zip +patch_url = https://wrapdb.mesonbuild.com/v2/abseil-cpp_20240722.0-3/get_patch +patch_hash = 12dd8df1488a314c53e3751abd2750cf233b830651d168b6a9f15e7d0cf71f7b +source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/abseil-cpp_20240722.0-3/abseil-cpp-20240722.0.tar.gz +wrapdb_version = 20240722.0-3 + +[provide] +absl_base = absl_base_dep +absl_container = absl_container_dep +absl_debugging = absl_debugging_dep +absl_log = absl_log_dep +absl_flags = absl_flags_dep +absl_hash = absl_hash_dep +absl_crc = absl_crc_dep +absl_numeric = absl_numeric_dep +absl_profiling = absl_profiling_dep +absl_random = absl_random_dep +absl_status = absl_status_dep +absl_strings = absl_strings_dep +absl_synchronization = absl_synchronization_dep +absl_time = absl_time_dep +absl_types = absl_types_dep +absl_algorithm_container = absl_base_dep +absl_any_invocable = absl_base_dep +absl_bad_any_cast_impl = absl_types_dep +absl_bad_optional_access = absl_types_dep +absl_bad_variant_access = absl_types_dep +absl_bind_front = absl_base_dep +absl_city = absl_hash_dep +absl_civil_time = absl_time_dep +absl_cleanup = absl_base_dep +absl_cord = absl_strings_dep +absl_cord_internal = absl_strings_dep +absl_cordz_functions = absl_strings_dep +absl_cordz_handle = absl_strings_dep +absl_cordz_info = absl_strings_dep +absl_cordz_sample_token = absl_strings_dep +absl_core_headers = absl_base_dep +absl_crc32c = absl_crc_dep +absl_debugging_internal = absl_debugging_dep +absl_demangle_internal = absl_debugging_dep +absl_die_if_null = absl_log_dep +absl_examine_stack = absl_debugging_dep +absl_exponential_biased = absl_profiling_dep +absl_failure_signal_handler = absl_debugging_dep +absl_flags_commandlineflag = absl_flags_dep +absl_flags_commandlineflag_internal = absl_flags_dep +absl_flags_config = absl_flags_dep +absl_flags_internal = absl_flags_dep +absl_flags_marshalling = absl_flags_dep +absl_flags_parse = absl_flags_dep +absl_flags_private_handle_accessor = absl_flags_dep +absl_flags_program_name = absl_flags_dep +absl_flags_reflection = absl_flags_dep +absl_flags_usage = absl_flags_dep +absl_flags_usage_internal = absl_flags_dep +absl_flat_hash_map = absl_container_dep +absl_flat_hash_set = absl_container_dep +absl_function_ref = absl_base_dep +absl_graphcycles_internal = absl_synchronization_dep +absl_hashtablez_sampler = absl_container_dep +absl_inlined_vector = absl_container_dep +absl_int128 = absl_numeric_dep +absl_leak_check = absl_debugging_dep +absl_log_initialize = absl_log_dep +absl_log_internal_check_op = absl_log_dep +absl_log_internal_message = absl_log_dep +absl_log_severity = absl_base_dep +absl_low_level_hash = absl_hash_dep +absl_memory = absl_base_dep +absl_optional = absl_types_dep +absl_periodic_sampler = absl_profiling_dep 
+absl_random_bit_gen_ref = absl_random_dep +absl_random_distributions = absl_random_dep +absl_random_internal_distribution_test_util = absl_random_dep +absl_random_internal_platform = absl_random_dep +absl_random_internal_pool_urbg = absl_random_dep +absl_random_internal_randen = absl_random_dep +absl_random_internal_randen_hwaes = absl_random_dep +absl_random_internal_randen_hwaes_impl = absl_random_dep +absl_random_internal_randen_slow = absl_random_dep +absl_random_internal_seed_material = absl_random_dep +absl_random_random = absl_random_dep +absl_random_seed_gen_exception = absl_random_dep +absl_random_seed_sequences = absl_random_dep +absl_raw_hash_set = absl_container_dep +absl_raw_logging_internal = absl_base_dep +absl_scoped_set_env = absl_base_dep +absl_span = absl_types_dep +absl_spinlock_wait = absl_base_dep +absl_stacktrace = absl_debugging_dep +absl_statusor = absl_status_dep +absl_str_format = absl_strings_dep +absl_str_format_internal = absl_strings_dep +absl_strerror = absl_base_dep +absl_string_view = absl_strings_dep +absl_strings_internal = absl_strings_dep +absl_symbolize = absl_debugging_dep +absl_throw_delegate = absl_base_dep +absl_time_zone = absl_time_dep +absl_type_traits = absl_base_dep +absl_utility = absl_base_dep +absl_variant = absl_types_dep diff --git a/cpp/subprojects/c-ares.wrap b/cpp/subprojects/c-ares.wrap new file mode 100644 index 00000000000..276abf81ba0 --- /dev/null +++ b/cpp/subprojects/c-ares.wrap @@ -0,0 +1,30 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[wrap-file] +directory = c-ares-1.34.4 +source_url = https://github.com/c-ares/c-ares/releases/download/v1.34.4/c-ares-1.34.4.tar.gz +source_filename = c-ares-1.34.4.tar.gz +source_hash = fa38dbed659ee4cc5a32df5e27deda575fa6852c79a72ba1af85de35a6ae222f +patch_filename = c-ares_1.34.4-1_patch.zip +patch_url = https://wrapdb.mesonbuild.com/v2/c-ares_1.34.4-1/get_patch +patch_hash = 0d334f610c714412629dd3e0a0dc2c7ced21744cf952c2d63d1eeb7680e4929d +source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/c-ares_1.34.4-1/c-ares-1.34.4.tar.gz +wrapdb_version = 1.34.4-1 + +[provide] +libcares = cares_dep diff --git a/cpp/subprojects/grpc.wrap b/cpp/subprojects/grpc.wrap new file mode 100644 index 00000000000..cc8744a1980 --- /dev/null +++ b/cpp/subprojects/grpc.wrap @@ -0,0 +1,34 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. 
You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[wrap-file] +directory = grpc-1.59.1 +source_url = https://github.com/grpc/grpc/archive/refs/tags/v1.59.1.tar.gz +source_filename = grpc-1.59.1.tar.gz +source_hash = 916f88a34f06b56432611aaa8c55befee96d0a7b7d7457733b9deeacbc016f99 +patch_filename = grpc_1.59.1-1_patch.zip +patch_url = https://wrapdb.mesonbuild.com/v2/grpc_1.59.1-1/get_patch +patch_hash = 61055ee4df79b0d98e0db8cdd9b534eb9d8ac71f7a8d2219b6efb905419fbe2b +source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/grpc_1.59.1-1/grpc-1.59.1.tar.gz +wrapdb_version = 1.59.1-1 + +[provide] +grpc = grpc_dep +grpc_unsecure = grpc_unsecure_dep +grpc++ = grpcpp_dep +grpc++_unsecure = grpcpp_unsecure_dep +program_names = grpc_cpp_plugin, grpc_python_plugin, grpc_ruby_plugin, grpc_php_plugin, grpc_node_plugin diff --git a/cpp/subprojects/protobuf.wrap b/cpp/subprojects/protobuf.wrap new file mode 100644 index 00000000000..b579a1b7358 --- /dev/null +++ b/cpp/subprojects/protobuf.wrap @@ -0,0 +1,33 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +[wrap-file] +directory = protobuf-25.2 +source_url = https://github.com/protocolbuffers/protobuf/releases/download/v25.2/protobuf-25.2.tar.gz +source_filename = protobuf-25.2.tar.gz +source_hash = 8ff511a64fc46ee792d3fe49a5a1bcad6f7dc50dfbba5a28b0e5b979c17f9871 +patch_filename = protobuf_25.2-2_patch.zip +patch_url = https://wrapdb.mesonbuild.com/v2/protobuf_25.2-2/get_patch +patch_hash = a2f5968097eb036c228b72258435d09e93dca4093d09acb5078a376d8155df46 +source_fallback_url = https://github.com/mesonbuild/wrapdb/releases/download/protobuf_25.2-2/protobuf-25.2.tar.gz +wrapdb_version = 25.2-2 + +[provide] +protobuf = protobuf_dep +protobuf-lite = protobuf_lite_dep +protoc = protoc_dep +program_names = protoc From 6350435217eb21ba4297ec3d6aedbcbe4795b167 Mon Sep 17 00:00:00 2001 From: Arash Andishgar <42874930+andishgar@users.noreply.github.com> Date: Sun, 8 Jun 2025 01:24:18 +0330 Subject: [PATCH 27/63] GH-46395: [C++][Statistics] Use EqualOptions for min and max in arrow::ArrayStatistics::Equals() (#46422) ### Rationale for this change `arrow::ArrayStatistics::Equals` does not handle double values for `ArrayStatistics::ValueType` correctly ### What changes are included in this PR? 
Add `arrow::EqualOptions` to `arrow::ArrayStatistics::Equals()` Add `arrow::ArrayStatisticsEquals()` Add `EqualOptions::use_atol_` Add `EqualOptions::use_atol()` Add `EqualOptions::use_atol(bool v)` ### Are these changes tested? Yes, I ran the relevant unit tests. ### Are there any user-facing changes? Yes. Add `arrow::ArrayStatisticsEquals()` Add `EqualOptions::use_atol()` Add `EqualOptions::use_atol(bool v)` * GitHub Issue: #46395 Authored-by: Arash Andishgar Signed-off-by: Sutou Kouhei --- cpp/src/arrow/array/statistics.h | 17 +++++-- cpp/src/arrow/array/statistics_test.cc | 64 ++++++++++++++++++++++++-- cpp/src/arrow/compare.cc | 54 ++++++++++++++++++++++ cpp/src/arrow/compare.h | 23 +++++++++ 4 files changed, 148 insertions(+), 10 deletions(-) diff --git a/cpp/src/arrow/array/statistics.h b/cpp/src/arrow/array/statistics.h index 5380debe3b6..6accd48af77 100644 --- a/cpp/src/arrow/array/statistics.h +++ b/cpp/src/arrow/array/statistics.h @@ -22,6 +22,7 @@ #include #include +#include "arrow/compare.h" #include "arrow/type.h" #include "arrow/util/visibility.h" @@ -127,11 +128,17 @@ struct ARROW_EXPORT ArrayStatistics { /// \brief Whether the maximum value is exact or not bool is_max_exact = false; - /// \brief Check two statistics for equality - bool Equals(const ArrayStatistics& other) const { - return null_count == other.null_count && distinct_count == other.distinct_count && - min == other.min && is_min_exact == other.is_min_exact && max == other.max && - is_max_exact == other.is_max_exact; + /// \brief Check two \ref arrow::ArrayStatistics for equality + /// + /// \param other The \ref arrow::ArrayStatistics instance to compare against. + /// + /// \param equal_options Options used to compare double values for equality. + /// + /// \return True if the two \ref arrow::ArrayStatistics instances are equal; otherwise, + /// false. + bool Equals(const ArrayStatistics& other, + const EqualOptions& equal_options = EqualOptions::Defaults()) const { + return ArrayStatisticsEquals(*this, other, equal_options); } /// \brief Check two statistics for equality diff --git a/cpp/src/arrow/array/statistics_test.cc b/cpp/src/arrow/array/statistics_test.cc index cf15a5d3829..95199a9683b 100644 --- a/cpp/src/arrow/array/statistics_test.cc +++ b/cpp/src/arrow/array/statistics_test.cc @@ -15,13 +15,17 @@ // specific language governing permissions and limitations // under the License.
+#include +#include + #include #include "arrow/array/statistics.h" +#include "arrow/compare.h" namespace arrow { -TEST(ArrayStatisticsTest, TestNullCount) { +TEST(TestArrayStatistics, NullCount) { ArrayStatistics statistics; ASSERT_FALSE(statistics.null_count.has_value()); statistics.null_count = 29; @@ -29,7 +33,7 @@ ASSERT_EQ(29, statistics.null_count.value()); } -TEST(ArrayStatisticsTest, TestDistinctCount) { +TEST(TestArrayStatistics, DistinctCount) { ArrayStatistics statistics; ASSERT_FALSE(statistics.distinct_count.has_value()); statistics.distinct_count = 29; @@ -37,7 +41,7 @@ ASSERT_EQ(29, statistics.distinct_count.value()); } -TEST(ArrayStatisticsTest, TestMin) { +TEST(TestArrayStatistics, Min) { ArrayStatistics statistics; ASSERT_FALSE(statistics.min.has_value()); ASSERT_FALSE(statistics.is_min_exact); @@ -49,7 +53,7 @@ ASSERT_TRUE(statistics.is_min_exact); } -TEST(ArrayStatisticsTest, TestMax) { +TEST(TestArrayStatistics, Max) { ArrayStatistics statistics; ASSERT_FALSE(statistics.max.has_value()); ASSERT_FALSE(statistics.is_max_exact); @@ -61,7 +65,7 @@ ASSERT_FALSE(statistics.is_max_exact); } -TEST(ArrayStatisticsTest, TestEquality) { +TEST(TestArrayStatistics, EqualityNonDoubleValue) { ArrayStatistics statistics1; ArrayStatistics statistics2; @@ -96,6 +100,56 @@ ASSERT_NE(statistics1, statistics2); statistics2.is_max_exact = true; ASSERT_EQ(statistics1, statistics2); + + // Test different ArrayStatistics::ValueType + statistics1.max = static_cast<int64_t>(29); + statistics2.max = static_cast<uint64_t>(29); + ASSERT_NE(statistics1, statistics2); +} + +class TestArrayStatisticsEqualityDoubleValue : public ::testing::Test { + protected: + ArrayStatistics statistics1_; + ArrayStatistics statistics2_; + EqualOptions options_ = EqualOptions::Defaults(); +}; + +TEST_F(TestArrayStatisticsEqualityDoubleValue, ExactValue) { + statistics2_.min = 29.0; + statistics1_.min = 29.0; + ASSERT_EQ(statistics1_, statistics2_); + statistics2_.min = 30.0; + ASSERT_NE(statistics1_, statistics2_); +} + +TEST_F(TestArrayStatisticsEqualityDoubleValue, SignedZero) { + statistics1_.min = +0.0; + statistics2_.min = -0.0; + ASSERT_TRUE(statistics1_.Equals(statistics2_, options_.signed_zeros_equal(true))); + ASSERT_FALSE(statistics1_.Equals(statistics2_, options_.signed_zeros_equal(false))); +} + +TEST_F(TestArrayStatisticsEqualityDoubleValue, Infinity) { + auto infinity = std::numeric_limits<double>::infinity(); + statistics1_.min = infinity; + statistics2_.min = infinity; + ASSERT_EQ(statistics1_, statistics2_); + statistics1_.min = -infinity; + ASSERT_NE(statistics1_, statistics2_); +} + +TEST_F(TestArrayStatisticsEqualityDoubleValue, NaN) { + statistics1_.min = std::numeric_limits<double>::quiet_NaN(); + statistics2_.min = std::numeric_limits<double>::quiet_NaN(); + ASSERT_TRUE(statistics1_.Equals(statistics2_, options_.nans_equal(true))); + ASSERT_FALSE(statistics1_.Equals(statistics2_, options_.nans_equal(false))); +} + +TEST_F(TestArrayStatisticsEqualityDoubleValue, ApproximateEquals) { + statistics1_.max = 0.5001f; + statistics2_.max = 0.5; + ASSERT_FALSE(statistics1_.Equals(statistics2_, options_.atol(1e-3).use_atol(false))); + ASSERT_TRUE(statistics1_.Equals(statistics2_, options_.atol(1e-3))); } } // namespace arrow diff --git a/cpp/src/arrow/compare.cc b/cpp/src/arrow/compare.cc index 3b64a8fd09f..2460afbf87c 100644 ---
a/cpp/src/arrow/compare.cc +++ b/cpp/src/arrow/compare.cc @@ -24,13 +24,16 @@ #include #include #include +#include #include #include #include +#include #include #include "arrow/array.h" #include "arrow/array/diff.h" +#include "arrow/array/statistics.h" #include "arrow/buffer.h" #include "arrow/scalar.h" #include "arrow/sparse_tensor.h" @@ -1523,4 +1526,55 @@ bool TypeEquals(const DataType& left, const DataType& right, bool check_metadata } } +namespace { + +bool DoubleEquals(const double& left, const double& right, const EqualOptions& options) { + bool result; + auto visitor = [&](auto&& compare_func) { result = compare_func(left, right); }; + VisitFloatingEquality(options, options.use_atol(), std::move(visitor)); + return result; +} + +bool ArrayStatisticsValueTypeEquals( + const std::optional& left, + const std::optional& right, const EqualOptions& options) { + if (!left.has_value() || !right.has_value()) { + return left.has_value() == right.has_value(); + } else if (left->index() != right->index()) { + return false; + } else { + auto EqualsVisitor = [&](const auto& v1, const auto& v2) { + using type_1 = std::decay_t; + using type_2 = std::decay_t; + if constexpr (std::conjunction_v, + std::is_same>) { + return DoubleEquals(v1, v2, options); + } else if constexpr (std::is_same_v) { + return v1 == v2; + } + // It is unreachable + DCHECK(false); + return false; + }; + return std::visit(EqualsVisitor, left.value(), right.value()); + } +} + +bool ArrayStatisticsEqualsImpl(const ArrayStatistics& left, const ArrayStatistics& right, + const EqualOptions& equal_options) { + return left.null_count == right.null_count && + left.distinct_count == right.distinct_count && + left.is_min_exact == right.is_min_exact && + left.is_max_exact == right.is_max_exact && + ArrayStatisticsValueTypeEquals(left.min, right.min, equal_options) && + ArrayStatisticsValueTypeEquals(left.max, right.max, equal_options); +} + +} // namespace + +bool ArrayStatisticsEquals(const ArrayStatistics& left, const ArrayStatistics& right, + const EqualOptions& options) { + return ArrayStatisticsEqualsImpl(left, right, options); +} + } // namespace arrow diff --git a/cpp/src/arrow/compare.h b/cpp/src/arrow/compare.h index 6b365c59913..ec7dc8bda18 100644 --- a/cpp/src/arrow/compare.h +++ b/cpp/src/arrow/compare.h @@ -27,6 +27,7 @@ namespace arrow { +struct ArrayStatistics; class Array; class DataType; class Tensor; @@ -58,7 +59,18 @@ class EqualOptions { return res; } + /// Whether the "atol" property is used in the comparison. + bool use_atol() const { return use_atol_; } + + /// Return a new EqualOptions object with the "use_atol" property changed. + EqualOptions use_atol(bool v) const { + auto res = EqualOptions(*this); + res.use_atol_ = v; + return res; + } + /// The absolute tolerance for approximate comparisons of floating-point values. + /// Note that this option is ignored if "use_atol" is set to false. double atol() const { return atol_; } /// Return a new EqualOptions object with the "atol" property changed. 
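Before the remaining compare.h hunks, a minimal usage sketch of the API added in this patch, based on the signatures shown in these diffs (not part of the patch itself):

```cpp
#include <iostream>

#include "arrow/array/statistics.h"
#include "arrow/compare.h"

int main() {
  arrow::ArrayStatistics left;
  arrow::ArrayStatistics right;
  left.max = 0.5001;  // stored as double in ArrayStatistics::ValueType
  right.max = 0.5;

  // With use_atol(true), the default, atol bounds the allowed difference.
  auto approx = arrow::EqualOptions::Defaults().atol(1e-3);
  std::cout << left.Equals(right, approx) << std::endl;  // prints 1

  // With use_atol(false), atol is ignored and comparison is exact.
  auto exact = arrow::EqualOptions::Defaults().use_atol(false);
  std::cout << left.Equals(right, exact) << std::endl;   // prints 0
  return 0;
}
```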
@@ -87,6 +99,7 @@ class EqualOptions { double atol_ = kDefaultAbsoluteTolerance; bool nans_equal_ = false; bool signed_zeros_equal_ = true; + bool use_atol_ = true; std::ostream* diff_sink_ = NULLPTR; }; @@ -135,6 +148,16 @@ ARROW_EXPORT bool SparseTensorEquals(const SparseTensor& left, const SparseTenso ARROW_EXPORT bool TypeEquals(const DataType& left, const DataType& right, bool check_metadata = true); +/// \brief Check two \ref arrow::ArrayStatistics for equality +/// \param[in] left an \ref arrow::ArrayStatistics +/// \param[in] right an \ref arrow::ArrayStatistics +/// \param[in] options Options used to compare double values for equality. +/// \return True if the two \ref arrow::ArrayStatistics instances are equal; otherwise, +/// false. +ARROW_EXPORT bool ArrayStatisticsEquals( + const ArrayStatistics& left, const ArrayStatistics& right, + const EqualOptions& options = EqualOptions::Defaults()); + /// Returns true if scalars are equal /// \param[in] left a Scalar /// \param[in] right a Scalar From 686889d7969459e8a5eb38cea78b6c23605c16e9 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Mon, 9 Jun 2025 09:55:01 +0200 Subject: [PATCH 28/63] GH-46459: [C++] Make some arrow/util headers internal (#46721) ### Rationale for this change Historically, we've been lax about selecting which APIs are public. A lot of internal APIs are exposed publicly. ### What changes are included in this PR? Make some headers in `arrow/util` internal. They won't be installed and so won't be available for third-party usage. Note that this represents a subset of all internal APIs in `arrow/util`, as some of them are included in other public headers. ### Are these changes tested? Yes, by existing CI configurations. ### Are there any user-facing changes? Unless the user was relying on internal APIs, there should not be any change. 
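To make the practical effect concrete, a sketch from the perspective of a hypothetical downstream project (illustrative, not from this patch):

```cpp
// Before this change, an out-of-tree file could (accidentally) compile
// against an installed internal header:
//
//   #include "arrow/util/tdigest.h"  // now tdigest_internal.h and no
//                                    // longer installed with Arrow
//
// After it, third-party code must stick to public headers:
#include "arrow/api.h"

int main() {
  arrow::Int64Builder builder;  // public API is unaffected
  return builder.Reserve(10).ok() ? 0 : 1;
}
```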
* GitHub Issue: #46459 Lead-authored-by: Antoine Pitrou Co-authored-by: Sutou Kouhei Signed-off-by: Antoine Pitrou --- cpp/src/arrow/acero/partition_util.h | 1 + cpp/src/arrow/array/data.cc | 2 +- cpp/src/arrow/array/util.cc | 2 +- cpp/src/arrow/array/validate.cc | 2 +- cpp/src/arrow/compare.cc | 2 +- .../compute/kernels/aggregate_tdigest.cc | 2 +- .../compute/kernels/hash_aggregate_numeric.cc | 2 +- cpp/src/arrow/csv/converter.cc | 2 +- cpp/src/arrow/dataset/dataset_writer.cc | 2 +- cpp/src/arrow/dataset/file_base.cc | 2 +- cpp/src/arrow/datum.cc | 2 +- cpp/src/arrow/extension/fixed_shape_tensor.cc | 4 ++-- .../extension/fixed_shape_tensor_test.cc | 2 +- cpp/src/arrow/extension/tensor_internal.h | 2 +- cpp/src/arrow/filesystem/s3_internal.h | 2 +- cpp/src/arrow/filesystem/test_util.h | 2 +- cpp/src/arrow/flight/flight_benchmark.cc | 4 ++-- cpp/src/arrow/io/memory.cc | 2 +- cpp/src/arrow/json/parser.cc | 4 ++-- cpp/src/arrow/sparse_tensor_test.cc | 2 +- cpp/src/arrow/tensor/csf_converter.cc | 2 +- .../arrow/util/bit_stream_utils_internal.h | 2 +- cpp/src/arrow/util/bit_util_test.cc | 2 +- ...bitset_stack.h => bitset_stack_internal.h} | 0 cpp/src/arrow/util/bpacking.cc | 14 +++++++------- cpp/src/arrow/util/bpacking64_codegen.py | 2 +- ...efault.h => bpacking64_default_internal.h} | 0 cpp/src/arrow/util/bpacking_avx2.cc | 2 +- ...acking_avx2.h => bpacking_avx2_internal.h} | 0 cpp/src/arrow/util/bpacking_avx512.cc | 2 +- ...ng_avx512.h => bpacking_avx512_internal.h} | 0 ..._default.h => bpacking_default_internal.h} | 0 .../util/{bpacking.h => bpacking_internal.h} | 0 cpp/src/arrow/util/bpacking_neon.cc | 2 +- ...acking_neon.h => bpacking_neon_internal.h} | 0 .../bpacking_simd128_generated_internal.h | 2 +- .../bpacking_simd256_generated_internal.h | 2 +- .../bpacking_simd512_generated_internal.h | 2 +- cpp/src/arrow/util/bpacking_simd_codegen.py | 2 +- cpp/src/arrow/util/bpacking_simd_internal.h | 2 +- cpp/src/arrow/util/counting_semaphore.cc | 2 +- ...aphore.h => counting_semaphore_internal.h} | 0 cpp/src/arrow/util/counting_semaphore_test.cc | 2 +- cpp/src/arrow/util/delimiting.cc | 1 + cpp/src/arrow/util/dict_util.cc | 3 ++- .../{dict_util.h => dict_util_internal.h} | 0 .../util/{dispatch.h => dispatch_internal.h} | 0 ...version.h => double_conversion_internal.h} | 0 cpp/src/arrow/util/formatting.cc | 2 +- cpp/src/arrow/util/formatting.h | 3 +-- cpp/src/arrow/util/key_value_metadata.cc | 2 +- cpp/src/arrow/util/{map.h => map_internal.h} | 0 cpp/src/arrow/util/memory.cc | 2 +- .../util/{memory.h => memory_internal.h} | 0 cpp/src/arrow/util/meson.build | 19 ------------------- .../arrow/util/{print.h => print_internal.h} | 0 .../arrow/util/{sort.h => sort_internal.h} | 0 .../util/{spaced.h => spaced_internal.h} | 0 cpp/src/arrow/util/stl_util_test.cc | 2 +- .../{stopwatch.h => stopwatch_internal.h} | 0 cpp/src/arrow/util/tdigest.cc | 2 +- cpp/src/arrow/util/tdigest_benchmark.cc | 2 +- .../util/{tdigest.h => tdigest_internal.h} | 0 cpp/src/arrow/util/tdigest_test.cc | 2 +- cpp/src/arrow/util/trie.cc | 2 +- cpp/src/arrow/util/trie_benchmark.cc | 2 +- .../arrow/util/{trie.h => trie_internal.h} | 0 cpp/src/arrow/util/trie_test.cc | 2 +- cpp/src/gandiva/gdv_function_stubs.cc | 2 +- cpp/src/gandiva/gdv_string_function_stubs.cc | 2 +- cpp/src/parquet/decoder.cc | 2 +- cpp/src/parquet/encoder.cc | 2 +- cpp/src/parquet/level_comparison.cc | 2 +- 73 files changed, 63 insertions(+), 80 deletions(-) rename cpp/src/arrow/util/{bitset_stack.h => bitset_stack_internal.h} (100%) rename 
cpp/src/arrow/util/{bpacking64_default.h => bpacking64_default_internal.h} (100%) rename cpp/src/arrow/util/{bpacking_avx2.h => bpacking_avx2_internal.h} (100%) rename cpp/src/arrow/util/{bpacking_avx512.h => bpacking_avx512_internal.h} (100%) rename cpp/src/arrow/util/{bpacking_default.h => bpacking_default_internal.h} (100%) rename cpp/src/arrow/util/{bpacking.h => bpacking_internal.h} (100%) rename cpp/src/arrow/util/{bpacking_neon.h => bpacking_neon_internal.h} (100%) rename cpp/src/arrow/util/{counting_semaphore.h => counting_semaphore_internal.h} (100%) rename cpp/src/arrow/util/{dict_util.h => dict_util_internal.h} (100%) rename cpp/src/arrow/util/{dispatch.h => dispatch_internal.h} (100%) rename cpp/src/arrow/util/{double_conversion.h => double_conversion_internal.h} (100%) rename cpp/src/arrow/util/{map.h => map_internal.h} (100%) rename cpp/src/arrow/util/{memory.h => memory_internal.h} (100%) rename cpp/src/arrow/util/{print.h => print_internal.h} (100%) rename cpp/src/arrow/util/{sort.h => sort_internal.h} (100%) rename cpp/src/arrow/util/{spaced.h => spaced_internal.h} (100%) rename cpp/src/arrow/util/{stopwatch.h => stopwatch_internal.h} (100%) rename cpp/src/arrow/util/{tdigest.h => tdigest_internal.h} (100%) rename cpp/src/arrow/util/{trie.h => trie_internal.h} (100%) diff --git a/cpp/src/arrow/acero/partition_util.h b/cpp/src/arrow/acero/partition_util.h index d02e9cb03f0..52cc47bb8a9 100644 --- a/cpp/src/arrow/acero/partition_util.h +++ b/cpp/src/arrow/acero/partition_util.h @@ -22,6 +22,7 @@ #include #include #include + #include "arrow/acero/util.h" #include "arrow/buffer.h" #include "arrow/util/pcg_random.h" diff --git a/cpp/src/arrow/array/data.cc b/cpp/src/arrow/array/data.cc index 2e55668fb96..b2fe52f9bbe 100644 --- a/cpp/src/arrow/array/data.cc +++ b/cpp/src/arrow/array/data.cc @@ -34,7 +34,7 @@ #include "arrow/type_traits.h" #include "arrow/util/binary_view_util.h" #include "arrow/util/bitmap_ops.h" -#include "arrow/util/dict_util.h" +#include "arrow/util/dict_util_internal.h" #include "arrow/util/logging_internal.h" #include "arrow/util/macros.h" #include "arrow/util/range.h" diff --git a/cpp/src/arrow/array/util.cc b/cpp/src/arrow/array/util.cc index 3180b66a1f7..8bc8a20c550 100644 --- a/cpp/src/arrow/array/util.cc +++ b/cpp/src/arrow/array/util.cc @@ -43,7 +43,7 @@ #include "arrow/util/decimal.h" #include "arrow/util/endian.h" #include "arrow/util/logging_internal.h" -#include "arrow/util/sort.h" +#include "arrow/util/sort_internal.h" #include "arrow/util/span.h" #include "arrow/visit_data_inline.h" #include "arrow/visit_type_inline.h" diff --git a/cpp/src/arrow/array/validate.cc b/cpp/src/arrow/array/validate.cc index 3c9148ebb29..bd0d00126d5 100644 --- a/cpp/src/arrow/array/validate.cc +++ b/cpp/src/arrow/array/validate.cc @@ -30,7 +30,7 @@ #include "arrow/util/int_util_overflow.h" #include "arrow/util/logging_internal.h" #include "arrow/util/ree_util.h" -#include "arrow/util/sort.h" +#include "arrow/util/sort_internal.h" #include "arrow/util/string.h" #include "arrow/util/unreachable.h" #include "arrow/util/utf8.h" diff --git a/cpp/src/arrow/compare.cc b/cpp/src/arrow/compare.cc index 2460afbf87c..a7df9efdbbb 100644 --- a/cpp/src/arrow/compare.cc +++ b/cpp/src/arrow/compare.cc @@ -51,7 +51,7 @@ #include "arrow/util/key_value_metadata.h" #include "arrow/util/logging_internal.h" #include "arrow/util/macros.h" -#include "arrow/util/memory.h" +#include "arrow/util/memory_internal.h" #include "arrow/util/ree_util.h" #include "arrow/visit_scalar_inline.h" #include 
"arrow/visit_type_inline.h" diff --git a/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc b/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc index 83d01091b3c..10fa00689da 100644 --- a/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc +++ b/cpp/src/arrow/compute/kernels/aggregate_tdigest.cc @@ -19,7 +19,7 @@ #include "arrow/compute/kernels/aggregate_internal.h" #include "arrow/compute/kernels/common_internal.h" #include "arrow/util/bit_run_reader.h" -#include "arrow/util/tdigest.h" +#include "arrow/util/tdigest_internal.h" namespace arrow { namespace compute { diff --git a/cpp/src/arrow/compute/kernels/hash_aggregate_numeric.cc b/cpp/src/arrow/compute/kernels/hash_aggregate_numeric.cc index 4a318942af6..6e0652efe69 100644 --- a/cpp/src/arrow/compute/kernels/hash_aggregate_numeric.cc +++ b/cpp/src/arrow/compute/kernels/hash_aggregate_numeric.cc @@ -32,7 +32,7 @@ #include "arrow/util/checked_cast.h" #include "arrow/util/int128_internal.h" #include "arrow/util/span.h" -#include "arrow/util/tdigest.h" +#include "arrow/util/tdigest_internal.h" #include "arrow/visit_type_inline.h" namespace arrow::compute::internal { diff --git a/cpp/src/arrow/csv/converter.cc b/cpp/src/arrow/csv/converter.cc index 3e991126d1f..ec31d4b1ceb 100644 --- a/cpp/src/arrow/csv/converter.cc +++ b/cpp/src/arrow/csv/converter.cc @@ -36,7 +36,7 @@ #include "arrow/type_traits.h" #include "arrow/util/checked_cast.h" #include "arrow/util/decimal.h" -#include "arrow/util/trie.h" +#include "arrow/util/trie_internal.h" #include "arrow/util/utf8_internal.h" #include "arrow/util/value_parsing.h" // IWYU pragma: keep diff --git a/cpp/src/arrow/dataset/dataset_writer.cc b/cpp/src/arrow/dataset/dataset_writer.cc index 7b454b09757..43895374aa7 100644 --- a/cpp/src/arrow/dataset/dataset_writer.cc +++ b/cpp/src/arrow/dataset/dataset_writer.cc @@ -29,7 +29,7 @@ #include "arrow/table.h" #include "arrow/util/future.h" #include "arrow/util/logging_internal.h" -#include "arrow/util/map.h" +#include "arrow/util/map_internal.h" #include "arrow/util/string.h" #include "arrow/util/tracing_internal.h" diff --git a/cpp/src/arrow/dataset/file_base.cc b/cpp/src/arrow/dataset/file_base.cc index 61f7c5e732a..b03818314c0 100644 --- a/cpp/src/arrow/dataset/file_base.cc +++ b/cpp/src/arrow/dataset/file_base.cc @@ -46,7 +46,7 @@ #include "arrow/util/iterator.h" #include "arrow/util/logging_internal.h" #include "arrow/util/macros.h" -#include "arrow/util/map.h" +#include "arrow/util/map_internal.h" #include "arrow/util/string.h" #include "arrow/util/task_group.h" #include "arrow/util/tracing_internal.h" diff --git a/cpp/src/arrow/datum.cc b/cpp/src/arrow/datum.cc index d7125d1f6c9..39900780986 100644 --- a/cpp/src/arrow/datum.cc +++ b/cpp/src/arrow/datum.cc @@ -31,7 +31,7 @@ #include "arrow/table.h" #include "arrow/util/byte_size.h" #include "arrow/util/logging_internal.h" -#include "arrow/util/memory.h" +#include "arrow/util/memory_internal.h" namespace arrow { diff --git a/cpp/src/arrow/extension/fixed_shape_tensor.cc b/cpp/src/arrow/extension/fixed_shape_tensor.cc index 1b195e4c9f5..d8fed85b1ed 100644 --- a/cpp/src/arrow/extension/fixed_shape_tensor.cc +++ b/cpp/src/arrow/extension/fixed_shape_tensor.cc @@ -28,8 +28,8 @@ #include "arrow/tensor.h" #include "arrow/util/int_util_overflow.h" #include "arrow/util/logging_internal.h" -#include "arrow/util/print.h" -#include "arrow/util/sort.h" +#include "arrow/util/print_internal.h" +#include "arrow/util/sort_internal.h" #include "arrow/util/string.h" #include diff --git 
a/cpp/src/arrow/extension/fixed_shape_tensor_test.cc b/cpp/src/arrow/extension/fixed_shape_tensor_test.cc index 51aea4b25fd..cfc12656966 100644 --- a/cpp/src/arrow/extension/fixed_shape_tensor_test.cc +++ b/cpp/src/arrow/extension/fixed_shape_tensor_test.cc @@ -28,7 +28,7 @@ #include "arrow/tensor.h" #include "arrow/testing/gtest_util.h" #include "arrow/util/key_value_metadata.h" -#include "arrow/util/sort.h" +#include "arrow/util/sort_internal.h" namespace arrow { diff --git a/cpp/src/arrow/extension/tensor_internal.h b/cpp/src/arrow/extension/tensor_internal.h index 069880cb17c..ed5a7ffc790 100644 --- a/cpp/src/arrow/extension/tensor_internal.h +++ b/cpp/src/arrow/extension/tensor_internal.h @@ -21,7 +21,7 @@ #include #include "arrow/status.h" -#include "arrow/util/print.h" +#include "arrow/util/print_internal.h" namespace arrow::internal { diff --git a/cpp/src/arrow/filesystem/s3_internal.h b/cpp/src/arrow/filesystem/s3_internal.h index 772387e5fb6..f408b18ebd3 100644 --- a/cpp/src/arrow/filesystem/s3_internal.h +++ b/cpp/src/arrow/filesystem/s3_internal.h @@ -37,7 +37,7 @@ #include "arrow/status.h" #include "arrow/util/base64.h" #include "arrow/util/logging.h" -#include "arrow/util/print.h" +#include "arrow/util/print_internal.h" #include "arrow/util/string.h" #ifndef ARROW_AWS_SDK_VERSION_CHECK diff --git a/cpp/src/arrow/filesystem/test_util.h b/cpp/src/arrow/filesystem/test_util.h index 3a643b7e9f0..3217cc8ca36 100644 --- a/cpp/src/arrow/filesystem/test_util.h +++ b/cpp/src/arrow/filesystem/test_util.h @@ -25,7 +25,7 @@ #include "arrow/filesystem/filesystem.h" #include "arrow/filesystem/mockfs.h" #include "arrow/testing/visibility.h" -#include "arrow/util/counting_semaphore.h" +#include "arrow/util/counting_semaphore_internal.h" namespace arrow { namespace fs { diff --git a/cpp/src/arrow/flight/flight_benchmark.cc b/cpp/src/arrow/flight/flight_benchmark.cc index 49e54d98f66..aa6e16820ef 100644 --- a/cpp/src/arrow/flight/flight_benchmark.cc +++ b/cpp/src/arrow/flight/flight_benchmark.cc @@ -31,8 +31,8 @@ #include "arrow/testing/gtest_util.h" #include "arrow/util/compression.h" #include "arrow/util/config.h" -#include "arrow/util/stopwatch.h" -#include "arrow/util/tdigest.h" +#include "arrow/util/stopwatch_internal.h" +#include "arrow/util/tdigest_internal.h" #include "arrow/util/thread_pool.h" #include "arrow/flight/api.h" diff --git a/cpp/src/arrow/io/memory.cc b/cpp/src/arrow/io/memory.cc index 1ae03aeb143..d7b118b3982 100644 --- a/cpp/src/arrow/io/memory.cc +++ b/cpp/src/arrow/io/memory.cc @@ -31,7 +31,7 @@ #include "arrow/util/io_util.h" #include "arrow/util/logging_internal.h" #include "arrow/util/macros.h" -#include "arrow/util/memory.h" +#include "arrow/util/memory_internal.h" namespace arrow { namespace io { diff --git a/cpp/src/arrow/json/parser.cc b/cpp/src/arrow/json/parser.cc index dbc32489937..53f856d8012 100644 --- a/cpp/src/arrow/json/parser.cc +++ b/cpp/src/arrow/json/parser.cc @@ -35,10 +35,10 @@ #include "arrow/array/builder_binary.h" #include "arrow/buffer_builder.h" #include "arrow/type.h" -#include "arrow/util/bitset_stack.h" +#include "arrow/util/bitset_stack_internal.h" #include "arrow/util/checked_cast.h" #include "arrow/util/logging_internal.h" -#include "arrow/util/trie.h" +#include "arrow/util/trie_internal.h" #include "arrow/visit_type_inline.h" namespace arrow { diff --git a/cpp/src/arrow/sparse_tensor_test.cc b/cpp/src/arrow/sparse_tensor_test.cc index 73477fef4a4..c9c28a11b1b 100644 --- a/cpp/src/arrow/sparse_tensor_test.cc +++ 
b/cpp/src/arrow/sparse_tensor_test.cc @@ -33,7 +33,7 @@ #include "arrow/testing/util.h" #include "arrow/type.h" #include "arrow/util/logging_internal.h" -#include "arrow/util/sort.h" +#include "arrow/util/sort_internal.h" namespace arrow { diff --git a/cpp/src/arrow/tensor/csf_converter.cc b/cpp/src/arrow/tensor/csf_converter.cc index 2d925ddbbb0..f6470e16b78 100644 --- a/cpp/src/arrow/tensor/csf_converter.cc +++ b/cpp/src/arrow/tensor/csf_converter.cc @@ -29,7 +29,7 @@ #include "arrow/status.h" #include "arrow/type.h" #include "arrow/util/checked_cast.h" -#include "arrow/util/sort.h" +#include "arrow/util/sort_internal.h" #include "arrow/visit_type_inline.h" namespace arrow { diff --git a/cpp/src/arrow/util/bit_stream_utils_internal.h b/cpp/src/arrow/util/bit_stream_utils_internal.h index d59e88a3449..9d67c278bcc 100644 --- a/cpp/src/arrow/util/bit_stream_utils_internal.h +++ b/cpp/src/arrow/util/bit_stream_utils_internal.h @@ -24,7 +24,7 @@ #include #include "arrow/util/bit_util.h" -#include "arrow/util/bpacking.h" +#include "arrow/util/bpacking_internal.h" #include "arrow/util/logging.h" #include "arrow/util/macros.h" #include "arrow/util/ubsan.h" diff --git a/cpp/src/arrow/util/bit_util_test.cc b/cpp/src/arrow/util/bit_util_test.cc index 02f583e0110..fcaeb49261d 100644 --- a/cpp/src/arrow/util/bit_util_test.cc +++ b/cpp/src/arrow/util/bit_util_test.cc @@ -50,7 +50,7 @@ #include "arrow/util/bitmap_reader.h" #include "arrow/util/bitmap_visit.h" #include "arrow/util/bitmap_writer.h" -#include "arrow/util/bitset_stack.h" +#include "arrow/util/bitset_stack_internal.h" #include "arrow/util/endian.h" #include "arrow/util/ubsan.h" diff --git a/cpp/src/arrow/util/bitset_stack.h b/cpp/src/arrow/util/bitset_stack_internal.h similarity index 100% rename from cpp/src/arrow/util/bitset_stack.h rename to cpp/src/arrow/util/bitset_stack_internal.h diff --git a/cpp/src/arrow/util/bpacking.cc b/cpp/src/arrow/util/bpacking.cc index 56ddd376293..326dd050fe1 100644 --- a/cpp/src/arrow/util/bpacking.cc +++ b/cpp/src/arrow/util/bpacking.cc @@ -15,22 +15,22 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/util/bpacking.h" +#include "arrow/util/bpacking_internal.h" -#include "arrow/util/bpacking64_default.h" -#include "arrow/util/bpacking_default.h" +#include "arrow/util/bpacking64_default_internal.h" +#include "arrow/util/bpacking_default_internal.h" #include "arrow/util/cpu_info.h" -#include "arrow/util/dispatch.h" +#include "arrow/util/dispatch_internal.h" #include "arrow/util/logging_internal.h" #if defined(ARROW_HAVE_RUNTIME_AVX2) -# include "arrow/util/bpacking_avx2.h" +# include "arrow/util/bpacking_avx2_internal.h" #endif #if defined(ARROW_HAVE_RUNTIME_AVX512) -# include "arrow/util/bpacking_avx512.h" +# include "arrow/util/bpacking_avx512_internal.h" #endif #if defined(ARROW_HAVE_NEON) -# include "arrow/util/bpacking_neon.h" +# include "arrow/util/bpacking_neon_internal.h" #endif namespace arrow { diff --git a/cpp/src/arrow/util/bpacking64_codegen.py b/cpp/src/arrow/util/bpacking64_codegen.py index 88dc93ca657..22135fcbb23 100644 --- a/cpp/src/arrow/util/bpacking64_codegen.py +++ b/cpp/src/arrow/util/bpacking64_codegen.py @@ -21,7 +21,7 @@ # https://github.com/lemire/FrameOfReference/blob/146948b6058a976bc7767262ad3a2ce201486b93/scripts/turbopacking64.py # Usage: -# python bpacking64_codegen.py > bpacking64_default.h +# python bpacking64_codegen.py > bpacking64_default_internal.h def howmany(bit): """ how many values are we going to pack? 
""" diff --git a/cpp/src/arrow/util/bpacking64_default.h b/cpp/src/arrow/util/bpacking64_default_internal.h similarity index 100% rename from cpp/src/arrow/util/bpacking64_default.h rename to cpp/src/arrow/util/bpacking64_default_internal.h diff --git a/cpp/src/arrow/util/bpacking_avx2.cc b/cpp/src/arrow/util/bpacking_avx2.cc index 9105aaa2af4..02510a07b9f 100644 --- a/cpp/src/arrow/util/bpacking_avx2.cc +++ b/cpp/src/arrow/util/bpacking_avx2.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/util/bpacking_avx2.h" +#include "arrow/util/bpacking_avx2_internal.h" #include "arrow/util/bpacking_simd256_generated_internal.h" #include "arrow/util/bpacking_simd_internal.h" diff --git a/cpp/src/arrow/util/bpacking_avx2.h b/cpp/src/arrow/util/bpacking_avx2_internal.h similarity index 100% rename from cpp/src/arrow/util/bpacking_avx2.h rename to cpp/src/arrow/util/bpacking_avx2_internal.h diff --git a/cpp/src/arrow/util/bpacking_avx512.cc b/cpp/src/arrow/util/bpacking_avx512.cc index 3570bcc352b..6272ef1cde8 100644 --- a/cpp/src/arrow/util/bpacking_avx512.cc +++ b/cpp/src/arrow/util/bpacking_avx512.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/util/bpacking_avx512.h" +#include "arrow/util/bpacking_avx512_internal.h" #include "arrow/util/bpacking_simd512_generated_internal.h" #include "arrow/util/bpacking_simd_internal.h" diff --git a/cpp/src/arrow/util/bpacking_avx512.h b/cpp/src/arrow/util/bpacking_avx512_internal.h similarity index 100% rename from cpp/src/arrow/util/bpacking_avx512.h rename to cpp/src/arrow/util/bpacking_avx512_internal.h diff --git a/cpp/src/arrow/util/bpacking_default.h b/cpp/src/arrow/util/bpacking_default_internal.h similarity index 100% rename from cpp/src/arrow/util/bpacking_default.h rename to cpp/src/arrow/util/bpacking_default_internal.h diff --git a/cpp/src/arrow/util/bpacking.h b/cpp/src/arrow/util/bpacking_internal.h similarity index 100% rename from cpp/src/arrow/util/bpacking.h rename to cpp/src/arrow/util/bpacking_internal.h diff --git a/cpp/src/arrow/util/bpacking_neon.cc b/cpp/src/arrow/util/bpacking_neon.cc index 3ab6de75f4c..72b520e8cf1 100644 --- a/cpp/src/arrow/util/bpacking_neon.cc +++ b/cpp/src/arrow/util/bpacking_neon.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-#include "arrow/util/bpacking_neon.h" +#include "arrow/util/bpacking_neon_internal.h" #include "arrow/util/bpacking_simd128_generated_internal.h" #include "arrow/util/bpacking_simd_internal.h" diff --git a/cpp/src/arrow/util/bpacking_neon.h b/cpp/src/arrow/util/bpacking_neon_internal.h similarity index 100% rename from cpp/src/arrow/util/bpacking_neon.h rename to cpp/src/arrow/util/bpacking_neon_internal.h diff --git a/cpp/src/arrow/util/bpacking_simd128_generated_internal.h b/cpp/src/arrow/util/bpacking_simd128_generated_internal.h index 4b2c97c0a7d..5beecad4210 100644 --- a/cpp/src/arrow/util/bpacking_simd128_generated_internal.h +++ b/cpp/src/arrow/util/bpacking_simd128_generated_internal.h @@ -24,7 +24,7 @@ #include -#include "arrow/util/dispatch.h" +#include "arrow/util/dispatch_internal.h" #include "arrow/util/ubsan.h" namespace arrow { diff --git a/cpp/src/arrow/util/bpacking_simd256_generated_internal.h b/cpp/src/arrow/util/bpacking_simd256_generated_internal.h index 8b1756d3fc1..3dccb1745f7 100644 --- a/cpp/src/arrow/util/bpacking_simd256_generated_internal.h +++ b/cpp/src/arrow/util/bpacking_simd256_generated_internal.h @@ -24,7 +24,7 @@ #include -#include "arrow/util/dispatch.h" +#include "arrow/util/dispatch_internal.h" #include "arrow/util/ubsan.h" namespace arrow { diff --git a/cpp/src/arrow/util/bpacking_simd512_generated_internal.h b/cpp/src/arrow/util/bpacking_simd512_generated_internal.h index deeb423353a..4f2aeaeeb4b 100644 --- a/cpp/src/arrow/util/bpacking_simd512_generated_internal.h +++ b/cpp/src/arrow/util/bpacking_simd512_generated_internal.h @@ -24,7 +24,7 @@ #include -#include "arrow/util/dispatch.h" +#include "arrow/util/dispatch_internal.h" #include "arrow/util/ubsan.h" namespace arrow { diff --git a/cpp/src/arrow/util/bpacking_simd_codegen.py b/cpp/src/arrow/util/bpacking_simd_codegen.py index 581a19a53e5..9464908c021 100755 --- a/cpp/src/arrow/util/bpacking_simd_codegen.py +++ b/cpp/src/arrow/util/bpacking_simd_codegen.py @@ -164,7 +164,7 @@ def main(simd_width): #include - #include "arrow/util/dispatch.h" + #include "arrow/util/dispatch_internal.h" #include "arrow/util/ubsan.h" namespace arrow {{ diff --git a/cpp/src/arrow/util/bpacking_simd_internal.h b/cpp/src/arrow/util/bpacking_simd_internal.h index fa5a6689a56..98e192e7cb0 100644 --- a/cpp/src/arrow/util/bpacking_simd_internal.h +++ b/cpp/src/arrow/util/bpacking_simd_internal.h @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/util/dispatch.h" +#include "arrow/util/dispatch_internal.h" #include "arrow/util/logging.h" namespace arrow { diff --git a/cpp/src/arrow/util/counting_semaphore.cc b/cpp/src/arrow/util/counting_semaphore.cc index b3106a6f824..de9750ba9ee 100644 --- a/cpp/src/arrow/util/counting_semaphore.cc +++ b/cpp/src/arrow/util/counting_semaphore.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. 
-#include "arrow/util/counting_semaphore.h" +#include "arrow/util/counting_semaphore_internal.h" #include #include diff --git a/cpp/src/arrow/util/counting_semaphore.h b/cpp/src/arrow/util/counting_semaphore_internal.h similarity index 100% rename from cpp/src/arrow/util/counting_semaphore.h rename to cpp/src/arrow/util/counting_semaphore_internal.h diff --git a/cpp/src/arrow/util/counting_semaphore_test.cc b/cpp/src/arrow/util/counting_semaphore_test.cc index 4de11ce852a..6635b1ddd7d 100644 --- a/cpp/src/arrow/util/counting_semaphore_test.cc +++ b/cpp/src/arrow/util/counting_semaphore_test.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/util/counting_semaphore.h" +#include "arrow/util/counting_semaphore_internal.h" #include #include diff --git a/cpp/src/arrow/util/delimiting.cc b/cpp/src/arrow/util/delimiting.cc index 4794293e0b4..0bc1b45f6bf 100644 --- a/cpp/src/arrow/util/delimiting.cc +++ b/cpp/src/arrow/util/delimiting.cc @@ -16,6 +16,7 @@ // under the License. #include "arrow/util/delimiting.h" + #include "arrow/buffer.h" #include "arrow/util/logging_internal.h" diff --git a/cpp/src/arrow/util/dict_util.cc b/cpp/src/arrow/util/dict_util.cc index feab2324a40..c93517140ca 100644 --- a/cpp/src/arrow/util/dict_util.cc +++ b/cpp/src/arrow/util/dict_util.cc @@ -15,7 +15,8 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/util/dict_util.h" +#include "arrow/util/dict_util_internal.h" + #include "arrow/array/array_dict.h" #include "arrow/util/bit_util.h" #include "arrow/util/checked_cast.h" diff --git a/cpp/src/arrow/util/dict_util.h b/cpp/src/arrow/util/dict_util_internal.h similarity index 100% rename from cpp/src/arrow/util/dict_util.h rename to cpp/src/arrow/util/dict_util_internal.h diff --git a/cpp/src/arrow/util/dispatch.h b/cpp/src/arrow/util/dispatch_internal.h similarity index 100% rename from cpp/src/arrow/util/dispatch.h rename to cpp/src/arrow/util/dispatch_internal.h diff --git a/cpp/src/arrow/util/double_conversion.h b/cpp/src/arrow/util/double_conversion_internal.h similarity index 100% rename from cpp/src/arrow/util/double_conversion.h rename to cpp/src/arrow/util/double_conversion_internal.h diff --git a/cpp/src/arrow/util/formatting.cc b/cpp/src/arrow/util/formatting.cc index 97567d86321..58dadd0b11e 100644 --- a/cpp/src/arrow/util/formatting.cc +++ b/cpp/src/arrow/util/formatting.cc @@ -17,7 +17,7 @@ #include "arrow/util/formatting.h" #include "arrow/util/config.h" -#include "arrow/util/double_conversion.h" +#include "arrow/util/double_conversion_internal.h" #include "arrow/util/float16.h" #include "arrow/util/logging_internal.h" diff --git a/cpp/src/arrow/util/formatting.h b/cpp/src/arrow/util/formatting.h index f2e3622ce60..844b6fb91a8 100644 --- a/cpp/src/arrow/util/formatting.h +++ b/cpp/src/arrow/util/formatting.h @@ -30,9 +30,8 @@ #include #include "arrow/status.h" -#include "arrow/type.h" +#include "arrow/type_fwd.h" #include "arrow/type_traits.h" -#include "arrow/util/double_conversion.h" #include "arrow/util/macros.h" #include "arrow/util/string.h" #include "arrow/util/time.h" diff --git a/cpp/src/arrow/util/key_value_metadata.cc b/cpp/src/arrow/util/key_value_metadata.cc index 4390a4cb795..48e02c61202 100644 --- a/cpp/src/arrow/util/key_value_metadata.cc +++ b/cpp/src/arrow/util/key_value_metadata.cc @@ -30,7 +30,7 @@ #include "arrow/status.h" #include "arrow/util/key_value_metadata.h" #include "arrow/util/logging_internal.h" -#include 
"arrow/util/sort.h" +#include "arrow/util/sort_internal.h" using std::size_t; diff --git a/cpp/src/arrow/util/map.h b/cpp/src/arrow/util/map_internal.h similarity index 100% rename from cpp/src/arrow/util/map.h rename to cpp/src/arrow/util/map_internal.h diff --git a/cpp/src/arrow/util/memory.cc b/cpp/src/arrow/util/memory.cc index e91009d5860..20f5ca7aae8 100644 --- a/cpp/src/arrow/util/memory.cc +++ b/cpp/src/arrow/util/memory.cc @@ -18,7 +18,7 @@ #include #include "arrow/util/logging.h" -#include "arrow/util/memory.h" +#include "arrow/util/memory_internal.h" #include "arrow/util/thread_pool.h" namespace arrow { diff --git a/cpp/src/arrow/util/memory.h b/cpp/src/arrow/util/memory_internal.h similarity index 100% rename from cpp/src/arrow/util/memory.h rename to cpp/src/arrow/util/memory_internal.h diff --git a/cpp/src/arrow/util/meson.build b/cpp/src/arrow/util/meson.build index c398a599842..584ea3ffe4c 100644 --- a/cpp/src/arrow/util/meson.build +++ b/cpp/src/arrow/util/meson.build @@ -117,14 +117,7 @@ install_headers( 'bitmap_visit.h', 'bitmap_writer.h', 'bit_run_reader.h', - 'bitset_stack.h', 'bit_util.h', - 'bpacking64_default.h', - 'bpacking_avx2.h', - 'bpacking_avx512.h', - 'bpacking_default.h', - 'bpacking.h', - 'bpacking_neon.h', 'byte_size.h', 'cancel.h', 'checked_cast.h', @@ -132,15 +125,11 @@ install_headers( 'compression.h', 'concurrent_map.h', 'converter.h', - 'counting_semaphore.h', 'cpu_info.h', 'crc32.h', 'debug.h', 'decimal.h', 'delimiting.h', - 'dict_util.h', - 'dispatch.h', - 'double_conversion.h', 'endian.h', 'float16.h', 'formatting.h', @@ -158,14 +147,11 @@ install_headers( 'logger.h', 'logging.h', 'macros.h', - 'map.h', 'math_constants.h', - 'memory.h', 'mutex.h', 'parallel.h', 'pcg_random.h', 'prefetch.h', - 'print.h', 'queue.h', 'range.h', 'ree_util.h', @@ -173,19 +159,14 @@ install_headers( 'rows_to_batches.h', 'simd.h', 'small_vector.h', - 'sort.h', - 'spaced.h', 'span.h', - 'stopwatch.h', 'string_builder.h', 'string.h', 'task_group.h', - 'tdigest.h', 'test_common.h', 'thread_pool.h', 'time.h', 'tracing.h', - 'trie.h', 'type_fwd.h', 'type_traits.h', 'ubsan.h', diff --git a/cpp/src/arrow/util/print.h b/cpp/src/arrow/util/print_internal.h similarity index 100% rename from cpp/src/arrow/util/print.h rename to cpp/src/arrow/util/print_internal.h diff --git a/cpp/src/arrow/util/sort.h b/cpp/src/arrow/util/sort_internal.h similarity index 100% rename from cpp/src/arrow/util/sort.h rename to cpp/src/arrow/util/sort_internal.h diff --git a/cpp/src/arrow/util/spaced.h b/cpp/src/arrow/util/spaced_internal.h similarity index 100% rename from cpp/src/arrow/util/spaced.h rename to cpp/src/arrow/util/spaced_internal.h diff --git a/cpp/src/arrow/util/stl_util_test.cc b/cpp/src/arrow/util/stl_util_test.cc index 3f16051f1df..836469bc065 100644 --- a/cpp/src/arrow/util/stl_util_test.cc +++ b/cpp/src/arrow/util/stl_util_test.cc @@ -22,7 +22,7 @@ #include #include "arrow/testing/gtest_util.h" -#include "arrow/util/sort.h" +#include "arrow/util/sort_internal.h" #include "arrow/util/string.h" #include "arrow/util/vector.h" diff --git a/cpp/src/arrow/util/stopwatch.h b/cpp/src/arrow/util/stopwatch_internal.h similarity index 100% rename from cpp/src/arrow/util/stopwatch.h rename to cpp/src/arrow/util/stopwatch_internal.h diff --git a/cpp/src/arrow/util/tdigest.cc b/cpp/src/arrow/util/tdigest.cc index ec92fabed8e..36a83fb336d 100644 --- a/cpp/src/arrow/util/tdigest.cc +++ b/cpp/src/arrow/util/tdigest.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // 
under the License. -#include "arrow/util/tdigest.h" +#include "arrow/util/tdigest_internal.h" #include #include diff --git a/cpp/src/arrow/util/tdigest_benchmark.cc b/cpp/src/arrow/util/tdigest_benchmark.cc index d9cd632c394..f0bf4234125 100644 --- a/cpp/src/arrow/util/tdigest_benchmark.cc +++ b/cpp/src/arrow/util/tdigest_benchmark.cc @@ -19,7 +19,7 @@ #include "arrow/testing/gtest_util.h" #include "arrow/testing/random.h" -#include "arrow/util/tdigest.h" +#include "arrow/util/tdigest_internal.h" namespace arrow { namespace util { diff --git a/cpp/src/arrow/util/tdigest.h b/cpp/src/arrow/util/tdigest_internal.h similarity index 100% rename from cpp/src/arrow/util/tdigest.h rename to cpp/src/arrow/util/tdigest_internal.h diff --git a/cpp/src/arrow/util/tdigest_test.cc b/cpp/src/arrow/util/tdigest_test.cc index 63395b676a6..04742ec46d8 100644 --- a/cpp/src/arrow/util/tdigest_test.cc +++ b/cpp/src/arrow/util/tdigest_test.cc @@ -33,7 +33,7 @@ #include "arrow/testing/gtest_util.h" #include "arrow/testing/random.h" #include "arrow/testing/util.h" -#include "arrow/util/tdigest.h" +#include "arrow/util/tdigest_internal.h" namespace arrow { namespace internal { diff --git a/cpp/src/arrow/util/trie.cc b/cpp/src/arrow/util/trie.cc index 7862c86f38d..2b2a60154e6 100644 --- a/cpp/src/arrow/util/trie.cc +++ b/cpp/src/arrow/util/trie.cc @@ -15,7 +15,7 @@ // specific language governing permissions and limitations // under the License. -#include "arrow/util/trie.h" +#include "arrow/util/trie_internal.h" #include #include diff --git a/cpp/src/arrow/util/trie_benchmark.cc b/cpp/src/arrow/util/trie_benchmark.cc index b938f87d8d1..27fbffac4d0 100644 --- a/cpp/src/arrow/util/trie_benchmark.cc +++ b/cpp/src/arrow/util/trie_benchmark.cc @@ -23,7 +23,7 @@ #include "arrow/status.h" #include "arrow/testing/gtest_util.h" -#include "arrow/util/trie.h" +#include "arrow/util/trie_internal.h" namespace arrow { namespace internal { diff --git a/cpp/src/arrow/util/trie.h b/cpp/src/arrow/util/trie_internal.h similarity index 100% rename from cpp/src/arrow/util/trie.h rename to cpp/src/arrow/util/trie_internal.h diff --git a/cpp/src/arrow/util/trie_test.cc b/cpp/src/arrow/util/trie_test.cc index 9c6b7678a46..86d274178d6 100644 --- a/cpp/src/arrow/util/trie_test.cc +++ b/cpp/src/arrow/util/trie_test.cc @@ -26,7 +26,7 @@ #include #include "arrow/testing/gtest_util.h" -#include "arrow/util/trie.h" +#include "arrow/util/trie_internal.h" namespace arrow { namespace internal { diff --git a/cpp/src/gandiva/gdv_function_stubs.cc b/cpp/src/gandiva/gdv_function_stubs.cc index 7a47f7491a4..76bbdb902c0 100644 --- a/cpp/src/gandiva/gdv_function_stubs.cc +++ b/cpp/src/gandiva/gdv_function_stubs.cc @@ -26,7 +26,7 @@ #include "arrow/util/base64.h" #include "arrow/util/bit_util.h" -#include "arrow/util/double_conversion.h" +#include "arrow/util/double_conversion_internal.h" #include "arrow/util/value_parsing.h" #include "gandiva/encrypt_utils.h" diff --git a/cpp/src/gandiva/gdv_string_function_stubs.cc b/cpp/src/gandiva/gdv_string_function_stubs.cc index 17eefbe22e3..42af8fde614 100644 --- a/cpp/src/gandiva/gdv_string_function_stubs.cc +++ b/cpp/src/gandiva/gdv_string_function_stubs.cc @@ -23,7 +23,7 @@ #include #include -#include "arrow/util/double_conversion.h" +#include "arrow/util/double_conversion_internal.h" #include "arrow/util/utf8_internal.h" #include "arrow/util/value_parsing.h" diff --git a/cpp/src/parquet/decoder.cc b/cpp/src/parquet/decoder.cc index 5382b0f6e7e..ff21abcb1f2 100644 --- a/cpp/src/parquet/decoder.cc +++ 
b/cpp/src/parquet/decoder.cc @@ -43,7 +43,7 @@ #include "arrow/util/int_util_overflow.h" #include "arrow/util/logging_internal.h" #include "arrow/util/rle_encoding_internal.h" -#include "arrow/util/spaced.h" +#include "arrow/util/spaced_internal.h" #include "arrow/util/ubsan.h" #include "arrow/visit_data_inline.h" diff --git a/cpp/src/parquet/encoder.cc b/cpp/src/parquet/encoder.cc index 2a759b8e092..112b810a8f9 100644 --- a/cpp/src/parquet/encoder.cc +++ b/cpp/src/parquet/encoder.cc @@ -40,7 +40,7 @@ #include "arrow/util/int_util_overflow.h" #include "arrow/util/logging_internal.h" #include "arrow/util/rle_encoding_internal.h" -#include "arrow/util/spaced.h" +#include "arrow/util/spaced_internal.h" #include "arrow/util/ubsan.h" #include "arrow/visit_data_inline.h" diff --git a/cpp/src/parquet/level_comparison.cc b/cpp/src/parquet/level_comparison.cc index f3188e987d0..199e137ceab 100644 --- a/cpp/src/parquet/level_comparison.cc +++ b/cpp/src/parquet/level_comparison.cc @@ -23,7 +23,7 @@ #include -#include "arrow/util/dispatch.h" +#include "arrow/util/dispatch_internal.h" namespace parquet::internal {

From 2244783ed2f3f3ffa3c823ef5a4c77fbc064f82b Mon Sep 17 00:00:00 2001
From: Enrico Minack
Date: Mon, 9 Jun 2025 11:32:11 +0200
Subject: [PATCH 29/63] GH-31603: [C++] Add SecureString implementation to arrow/util/ (#46626)

### Rationale for this change

Arrow deals with secrets like encryption / decryption keys which must be kept private. One way of leaking such secrets is through memory allocation: another process may allocate memory that previously held the secret, because that memory was not cleared before being freed.

### What changes are included in this PR?

Uses various implementations of securely clearing memory, notably:

- `SecureZeroMemory` (Windows)
- `memset_s` (STDC)
- `OPENSSL_cleanse` (OpenSSL >= 3)
- `explicit_bzero` (glibc 2.25+)
- volatile `memset` (fallback)

### Are these changes tested?

Unit tests.

### Are there any user-facing changes?

This only adds the `SecureString` class and tests. Using this new infrastructure is done in follow-up pull requests.
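For a quick sense of the intended usage, a minimal sketch (hedged: `key`, its value, and the `main` scaffolding are illustrative only; the API itself is the one added by this PR):

```cpp
#include <string>
#include <string_view>

#include "arrow/util/secure_string.h"

int main() {
  std::string key = "my-secret-key";  // illustrative secret, not part of this PR
  // The move constructor either takes over key's heap buffer or securely
  // clears its local buffer; either way, key no longer holds the secret.
  arrow::util::SecureString secret(std::move(key));
  // Read access without copying the secret back into a plain std::string.
  std::string_view view = secret.as_view();
  (void)view;
  return 0;
}  // ~SecureString securely clears the wrapped buffer here
```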
* GitHub Issue: #31603 Lead-authored-by: Enrico Minack Co-authored-by: Antoine Pitrou Co-authored-by: Antoine Pitrou Signed-off-by: Antoine Pitrou --- cpp/src/arrow/CMakeLists.txt | 6 + cpp/src/arrow/util/CMakeLists.txt | 1 + cpp/src/arrow/util/secure_string.cc | 198 +++++++++ cpp/src/arrow/util/secure_string.h | 72 ++++ cpp/src/arrow/util/secure_string_test.cc | 498 +++++++++++++++++++++++ 5 files changed, 775 insertions(+) create mode 100644 cpp/src/arrow/util/secure_string.cc create mode 100644 cpp/src/arrow/util/secure_string.h create mode 100644 cpp/src/arrow/util/secure_string_test.cc diff --git a/cpp/src/arrow/CMakeLists.txt b/cpp/src/arrow/CMakeLists.txt index 77558726986..917f1d02a55 100644 --- a/cpp/src/arrow/CMakeLists.txt +++ b/cpp/src/arrow/CMakeLists.txt @@ -515,6 +515,7 @@ set(ARROW_UTIL_SRCS util/memory.cc util/mutex.cc util/ree_util.cc + util/secure_string.cc util/string.cc util/string_builder.cc util/task_group.cc @@ -574,6 +575,11 @@ if(ARROW_USE_GLOG) target_link_libraries(${ARROW_UTIL_TARGET} PRIVATE glog::glog) endforeach() endif() +if(ARROW_USE_OPENSSL) + foreach(ARROW_UTIL_TARGET ${ARROW_UTIL_TARGETS}) + target_link_libraries(${ARROW_UTIL_TARGET} PRIVATE ${ARROW_OPENSSL_LIBS}) + endforeach() +endif() if(ARROW_USE_XSIMD) foreach(ARROW_UTIL_TARGET ${ARROW_UTIL_TARGETS}) target_link_libraries(${ARROW_UTIL_TARGET} PRIVATE ${ARROW_XSIMD}) diff --git a/cpp/src/arrow/util/CMakeLists.txt b/cpp/src/arrow/util/CMakeLists.txt index 17eea5532cc..df47389240e 100644 --- a/cpp/src/arrow/util/CMakeLists.txt +++ b/cpp/src/arrow/util/CMakeLists.txt @@ -72,6 +72,7 @@ add_arrow_test(utility-test ree_util_test.cc reflection_test.cc rows_to_batches_test.cc + secure_string_test.cc small_vector_test.cc span_test.cc stl_util_test.cc diff --git a/cpp/src/arrow/util/secure_string.cc b/cpp/src/arrow/util/secure_string.cc new file mode 100644 index 00000000000..bd52c55f312 --- /dev/null +++ b/cpp/src/arrow/util/secure_string.cc @@ -0,0 +1,198 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +// __STDC_WANT_LIB_EXT1__ and string.h are required by memset_s: +// https://en.cppreference.com/w/c/string/byte/memset +#define __STDC_WANT_LIB_EXT1__ 1 +#include +#include + +#if defined(ARROW_USE_OPENSSL) +# include +# include +#endif + +#include "arrow/util/windows_compatibility.h" +#if defined(_WIN32) +# include +#endif + +#include "arrow/util/logging.h" +#include "arrow/util/secure_string.h" +#include "arrow/util/span.h" + +namespace arrow::util { + +/// Note: +/// A std::string is securely moved into a SecureString in two steps: +/// 1. the std::string is moved via std::move(string) +/// 2. the std::string is securely cleared +/// +/// The std::move has two different effects, depending on the size of the string. 
+/// A very short string (called local string) stores the string in a local buffer, +/// a long string stores a pointer to allocated memory that stores the string. +/// +/// If the string is a small string, std::move copies the local buffer. +/// If the string is a long string, std::move moves the pointer and then resets the +/// string size to 0 (which turns the string into a local string). +/// +/// In both cases, after a std::move(string), the string uses the local buffer. +/// +/// Thus, after a std::move(string), calling SecureClear(std::string*) only +/// securely clears the **local buffer** of the string. Therefore, std::move(string) +/// must move the pointer of long string into SecureString (which later clears the +/// string). Otherwise, the content of the string cannot be securely cleared. +/// +/// This condition is checked by SecureMove. + +namespace { +void SecureMove(std::string& string, std::string& dst) { + auto ptr = string.data(); + dst = std::move(string); + + // We require the buffer address string.data() to remain (not be freed) as is, + // or to be reused by dst. Otherwise, we cannot securely clear string after std::move + ARROW_CHECK(string.data() == ptr || dst.data() == ptr); +} +} // namespace + +void SecureString::SecureClear(std::string* secret) { + // call SecureClear first just in case secret->clear() frees some memory + SecureClear(reinterpret_cast(secret->data()), secret->capacity()); + secret->clear(); +} + +inline void SecureString::SecureClear(uint8_t* data, size_t size) { + // There is various prior art for this: + // https://www.cryptologie.net/article/419/zeroing-memory-compiler-optimizations-and-memset_s/ + // - libb2's `secure_zero_memory` at + // https://github.com/BLAKE2/libb2/blob/30d45a17c59dc7dbf853da3085b71d466275bd0a/src/blake2-impl.h#L140-L160 + // - libsodium's `sodium_memzero` at + // https://github.com/jedisct1/libsodium/blob/be58b2e6664389d9c7993b55291402934b43b3ca/src/libsodium/sodium/utils.c#L78:L101 + // Note: + // https://www.daemonology.net/blog/2014-09-06-zeroing-buffers-is-insufficient.html +#if defined(_WIN32) + // SecureZeroMemory is meant to not be optimized away + SecureZeroMemory(data, size); +#elif defined(__STDC_LIB_EXT1__) + // memset_s is meant to not be optimized away + memset_s(data, size, 0, size); +#elif defined(OPENSSL_VERSION_NUMBER) && OPENSSL_VERSION_NUMBER >= 0x30000000 + // rely on some implementation in OpenSSL cryptographic library + OPENSSL_cleanse(data, size); +#elif defined(__GLIBC__) && (__GLIBC__ > 2 || (__GLIBC__ == 2 && __GLIBC_MINOR__ >= 25)) + // explicit_bzero is meant to not be optimized away + explicit_bzero(data, size); +#else + // Volatile pointer to memset function is an attempt to avoid + // that the compiler optimizes away the memset function call. + // pretty much what OPENSSL_cleanse above does + // https://github.com/openssl/openssl/blob/3423c30db3aa044f46e1f0270e2ecd899415bf5f/crypto/mem_clr.c#L22 + static const volatile auto memset_v = &memset; + memset_v(data, 0, size); + +# if defined(__GNUC__) || defined(__clang__) + // __asm__ only supported by GCC and Clang + // not supported by MSVC on the ARM and x64 processors + // https://en.cppreference.com/w/c/language/asm.html + // https://en.cppreference.com/w/cpp/language/asm.html + + // Additional attempt on top of volatile memset_v above + // to avoid that the compiler optimizes away the memset function call. + // Assembler code that tells the compiler 'data' has side effects. 
+ // https://gcc.gnu.org/onlinedocs/gcc/Extended-Asm.html: + // - "volatile": the asm produces side effects + // - "memory": effectively forms a read/write memory barrier for the compiler + __asm__ __volatile__("" /* no actual code */ + : /* no output */ + : "r"(data) /* input */ + : "memory" /* memory side effects beyond input and output */); +# endif +#endif +} + +SecureString::SecureString(SecureString&& other) noexcept { + SecureMove(other.secret_, secret_); + other.Dispose(); +} + +SecureString::SecureString(std::string&& secret) noexcept { + SecureMove(secret, secret_); + SecureClear(&secret); +} + +SecureString::SecureString(size_t n, char c) noexcept : secret_(n, c) {} + +SecureString& SecureString::operator=(SecureString&& other) noexcept { + if (this == &other) { + // self-assignment + return *this; + } + Dispose(); + SecureMove(other.secret_, secret_); + other.Dispose(); + return *this; +} + +SecureString& SecureString::operator=(const SecureString& other) { + if (this == &other) { + // self-assignment + return *this; + } + Dispose(); + secret_ = other.secret_; + return *this; +} + +SecureString& SecureString::operator=(std::string&& secret) noexcept { + Dispose(); + SecureMove(secret, secret_); + SecureClear(&secret); + return *this; +} + +bool SecureString::operator==(const SecureString& other) const { + return secret_ == other.secret_; +} + +bool SecureString::operator!=(const SecureString& other) const { + return secret_ != other.secret_; +} + +bool SecureString::empty() const { return secret_.empty(); } + +std::size_t SecureString::size() const { return secret_.size(); } + +std::size_t SecureString::length() const { return secret_.length(); } + +std::size_t SecureString::capacity() const { return secret_.capacity(); } + +span SecureString::as_span() { + return {reinterpret_cast(secret_.data()), secret_.size()}; +} + +span SecureString::as_span() const { + return {reinterpret_cast(secret_.data()), secret_.size()}; +} + +std::string_view SecureString::as_view() const { + return {secret_.data(), secret_.size()}; +} + +void SecureString::Dispose() { SecureClear(&secret_); } + +} // namespace arrow::util diff --git a/cpp/src/arrow/util/secure_string.h b/cpp/src/arrow/util/secure_string.h new file mode 100644 index 00000000000..843f6cd0a95 --- /dev/null +++ b/cpp/src/arrow/util/secure_string.h @@ -0,0 +1,72 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include +#include + +#include "arrow/util/span.h" +#include "arrow/util/visibility.h" + +namespace arrow::util { +/** + * A secure string that ensures the wrapped string is cleared from memory on + * deconstruction. This class can only be created from std::string that are securely + * erased after creation. 
+ * + * Note: This class does not provide a constructor / assignment operator that copies a + * std::string because that would allow code to create a SecureString while accidentally + * not noticing the need to securely erasing the argument after invoking the constructor / + * calling the assignment operator. + */ +class ARROW_EXPORT SecureString { + public: + SecureString() noexcept = default; + SecureString(SecureString&&) noexcept; + SecureString(const SecureString&) = default; + explicit SecureString(std::string&&) noexcept; + explicit SecureString(size_t, char) noexcept; + + SecureString& operator=(SecureString&&) noexcept; + SecureString& operator=(const SecureString&); + SecureString& operator=(std::string&&) noexcept; + + bool operator==(const SecureString&) const; + bool operator!=(const SecureString&) const; + + ~SecureString() { Dispose(); } + + [[nodiscard]] bool empty() const; + [[nodiscard]] std::size_t size() const; + [[nodiscard]] std::size_t length() const; + [[nodiscard]] std::size_t capacity() const; + + [[nodiscard]] span as_span(); + [[nodiscard]] span as_span() const; + [[nodiscard]] std::string_view as_view() const; + + void Dispose(); + + static void SecureClear(std::string*); + static void SecureClear(uint8_t* data, size_t size); + + private: + std::string secret_; +}; + +} // namespace arrow::util diff --git a/cpp/src/arrow/util/secure_string_test.cc b/cpp/src/arrow/util/secure_string_test.cc new file mode 100644 index 00000000000..213a4b11f20 --- /dev/null +++ b/cpp/src/arrow/util/secure_string_test.cc @@ -0,0 +1,498 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include +#include +#include + +#include "arrow/util/secure_string.h" + +namespace arrow::util::test { + +#if defined(ARROW_VALGRIND) || defined(ADDRESS_SANITIZER) || defined(THREAD_SANITIZER) +# define CAN_TEST_DEALLOCATED_AREAS 0 +#else +# define CAN_TEST_DEALLOCATED_AREAS 1 +#endif + +std::string_view StringArea(const std::string& string) { + return {string.data(), string.capacity()}; +} + +// same as GTest ASSERT_PRED_FORMAT2 macro, but without the outer GTEST_ASSERT_ +#define COMPARE(val1, val2) \ + ::testing::internal::EqHelper::Compare(#val1, #val2, val1, val2) + +::testing::AssertionResult IsSecurelyCleared(const std::string_view& area) { + // the entire area is filled with zeros + std::string zeros(area.size(), '\0'); + return COMPARE(area, std::string_view(zeros)); +} + +::testing::AssertionResult IsSecurelyCleared(const std::string& string) { + return IsSecurelyCleared(StringArea(string)); +} + +/** + * Checks the area has been securely cleared after some position. 
+ */ +::testing::AssertionResult IsSecurelyCleared(const std::string_view& area, + const size_t pos) { + // the area after pos is filled with zeros + if (pos < area.size()) { + std::string zeros(area.size() - pos, '\0'); + return COMPARE(area.substr(pos), std::string_view(zeros)); + } + return ::testing::AssertionSuccess(); +} + +/** + * Checks the area has been securely cleared from the secret value. + * Assumes the area has been deallocated, so it might have been reclaimed and changed + * after cleaning. We cannot check for all-zeros, best we can check here is no secret + * character has leaked. If by any chance the modification produced a former key character + * at the right position, this will be false negative / flaky. Therefore, we check for + * three consecutive secret characters before we fail. + */ +::testing::AssertionResult IsSecurelyCleared(const std::string_view& area, + const std::string& secret_value) { +#if !CAN_TEST_DEALLOCATED_AREAS + return testing::AssertionSuccess() << "Not checking deallocated memory"; +#else + // accessing deallocated memory will fail when running with Address Sanitizer enabled + auto leaks = 0; + for (size_t i = 0; i < std::min(area.length(), secret_value.length()); i++) { + if (area[i] == secret_value[i]) { + leaks++; + } else { + if (leaks >= 3) { + break; + } + leaks = 0; + } + } + if (leaks >= 3) { + return ::testing::AssertionFailure() + << leaks << " characters of secret leaked into " << area; + } + return ::testing::AssertionSuccess(); +#endif +} + +#undef COMPARE + +TEST(TestSecureString, AssertSecurelyCleared) { + // This tests AssertSecurelyCleared helper methods is actually able to identify secret + // leakage. It retrieves assertion results and asserts result type and message. + testing::AssertionResult result = testing::AssertionSuccess(); + + // check short string with all zeros + auto short_zeros = std::string(8, '\0'); + short_zeros.resize(short_zeros.capacity(), '\0'); // for string buffers longer than 8 + short_zeros.resize(8); // now the entire string buffer has zeros + // checks the entire string buffer (capacity) + ASSERT_TRUE(IsSecurelyCleared(short_zeros)); + // checks only 10 bytes (length) + ASSERT_TRUE(IsSecurelyCleared(std::string_view(short_zeros))); + + // check long string with all zeros + auto long_zeros = std::string(1000, '\0'); + long_zeros.resize(long_zeros.capacity(), '\0'); // for longer string buffers + long_zeros.resize(1000); // now the entire string buffer has zeros + // checks the entire string buffer (capacity) + ASSERT_TRUE(IsSecurelyCleared(long_zeros)); + // checks only 1000 bytes (length) + ASSERT_TRUE(IsSecurelyCleared(std::string_view(long_zeros))); + + auto no_zeros = std::string("abcdefghijklmnopqrstuvwxyz"); + // string buffer in no_zeros can be larger than no_zeros.length() + // assert only the area that we can control + auto no_zeros_view = std::string_view(no_zeros); + result = IsSecurelyCleared(no_zeros_view); + ASSERT_FALSE(result); + ASSERT_EQ(std::string(result.message()), + "Expected equality of these values:\n" + " area\n" + " Which is: \"abcdefghijklmnopqrstuvwxyz\"\n" + " std::string_view(zeros)\n" + " Which is: " + "\"\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\" + "0\\0\\0\\0\\0\""); + + // check short string with zeros and non-zeros after string length + auto stars = std::string(12, '*'); + auto short_some_zeros = stars; + memset(short_some_zeros.data(), '\0', 8); + short_some_zeros.resize(8); + // string buffer in short_some_zeros can be larger than 12 + // 
assert only the area that we can control + auto short_some_zeros_view = std::string_view(short_some_zeros.data(), 12); + result = IsSecurelyCleared(short_some_zeros_view); + ASSERT_FALSE(result); + ASSERT_EQ(std::string(result.message()), + "Expected equality of these values:\n" + " area\n" + " Which is: \"\\0\\0\\0\\0\\0\\0\\0\\0\\0***\"\n" + " std::string_view(zeros)\n" + " Which is: \"\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\""); + + ASSERT_TRUE(IsSecurelyCleared(short_some_zeros, stars)); +#if CAN_TEST_DEALLOCATED_AREAS + result = IsSecurelyCleared(short_some_zeros_view, stars); + ASSERT_FALSE(result); + ASSERT_EQ(std::string(result.message()), + "3 characters of secret leaked into " + "\\0\\0\\0\\0\\0\\0\\0\\0\\0***"); +#endif + + // check long string with zeros and non-zeros after string length + stars = std::string(42, '*'); + auto long_some_zeros = stars; + memset(long_some_zeros.data(), '\0', 32); + long_some_zeros.resize(32); + // string buffer in long_some_zeros can be larger than 42 + // assert only the area that we can control + auto long_some_zeros_view = std::string_view(long_some_zeros.data(), 42); + result = IsSecurelyCleared(long_some_zeros_view); + ASSERT_FALSE(result); + ASSERT_EQ(std::string(result.message()), + "Expected equality of these values:\n" + " area\n" + " Which is: " + "\"\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\" + "0\\0\\0\\0\\0\\0\\0\\0\\0*********\"\n" + " std::string_view(zeros)\n" + " Which is: " + "\"\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\" + "0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\""); + + ASSERT_TRUE(IsSecurelyCleared(long_some_zeros, stars)); +#if CAN_TEST_DEALLOCATED_AREAS + result = IsSecurelyCleared(long_some_zeros_view, stars); + ASSERT_FALSE(result); + ASSERT_EQ(std::string(result.message()), + "9 characters of secret leaked into " + "\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\0\\" + "0\\0\\0\\0\\0\\0\\0\\0\\0*********"); +#endif + + // check string with non-zeros and zeros after string length + auto some_zeros_back = std::string(no_zeros.length() + 3, '\0'); + some_zeros_back = no_zeros; + memset(some_zeros_back.data() + no_zeros.length() * sizeof(char), '\0', 3 + 1); + // string buffer in some_zeros_back can be larger than no_zeros.length() + 3 + // assert only the area that we can control + auto some_zeros_back_view = + std::string_view(some_zeros_back.data(), no_zeros.length() + 3); + ASSERT_TRUE(IsSecurelyCleared(some_zeros_back_view, no_zeros.length())); +} + +TEST(TestSecureString, SecureClearString) { + // short string + { + std::string tiny("abc"); + auto old_area = StringArea(tiny); + SecureString::SecureClear(&tiny); + ASSERT_TRUE(IsSecurelyCleared(tiny)); + ASSERT_TRUE(IsSecurelyCleared(old_area)); + } + + // long string + { + std::string large(1024, 'x'); + large.resize(512, 'y'); + auto old_area = StringArea(large); + SecureString::SecureClear(&large); + ASSERT_TRUE(IsSecurelyCleared(large)); + ASSERT_TRUE(IsSecurelyCleared(old_area)); + } + + // empty string + { + // this creates an empty string with some non-zero characters in the string buffer + // we test that all those characters are securely cleared + std::string empty("abcdef"); + empty.resize(0); + auto old_area = StringArea(empty); + SecureString::SecureClear(&empty); + ASSERT_TRUE(IsSecurelyCleared(empty)); + ASSERT_TRUE(IsSecurelyCleared(old_area)); + } +} + +TEST(TestSecureString, Construct) { + // We use a very short and a very long string as memory management of 
short and long + // strings behaves differently. + std::vector strings = {"short secret", std::string(1024, 'x')}; + + for (const auto& original_string : strings) { + // move-constructing from a string either reuses its buffer or securely clears + // that string + std::string string = original_string; + auto old_string = StringArea(string); + SecureString secret_from_string(std::move(string)); + ASSERT_TRUE(IsSecurelyCleared(string)); + if (secret_from_string.as_view().data() != old_string.data()) { + ASSERT_TRUE(IsSecurelyCleared(old_string)); + } + ASSERT_FALSE(secret_from_string.empty()); + ASSERT_EQ(secret_from_string.as_view(), original_string); + + // move-constructing from a secure string securely clears that secure string + auto old_secret_from_string_view = secret_from_string.as_view(); + auto old_secret_from_string_value = std::string(secret_from_string.as_view()); + SecureString secret_from_move_secret(std::move(secret_from_string)); + ASSERT_TRUE(secret_from_string.empty()); + if (secret_from_move_secret.as_view().data() != old_secret_from_string_view.data()) { + ASSERT_TRUE(IsSecurelyCleared(old_secret_from_string_view)); + } + ASSERT_FALSE(secret_from_move_secret.empty()); + ASSERT_EQ(secret_from_move_secret.as_view(), old_secret_from_string_value); + + // copy-constructing from a secure string does not modify that secure string + SecureString secret_from_secret(secret_from_move_secret); + ASSERT_FALSE(secret_from_move_secret.empty()); + ASSERT_EQ(secret_from_move_secret.as_view(), old_secret_from_string_value); + ASSERT_FALSE(secret_from_secret.empty()); + ASSERT_EQ(secret_from_secret, secret_from_move_secret); + } +} + +TEST(TestSecureString, Assign) { + // We initialize with the first string and iteratively assign the subsequent values. + // The first two values are local (very short strings), the remainder are non-local + // strings. Memory management of short and long strings behaves differently. 
+ std::vector test_strings = {"secret", "another secret", + std::string(128, 'x'), std::string(1024, 'y')}; + for (auto& string : test_strings) { + // string buffer might be longer than string.length with arbitrary bytes + // secure string does not have to protect that garbage bytes + // zeroing here so we get expected results + auto length = string.length(); + string.resize(string.capacity(), '\0'); + string.resize(length); + } + + std::vector reverse_strings = std::vector(test_strings); + std::reverse(reverse_strings.begin(), reverse_strings.end()); + + for (auto vec : {test_strings, reverse_strings}) { + auto init_string = vec[0]; + auto strings = std::vector(vec.begin() + 1, vec.end()); + + { + // an initialized secure string + std::string init_string_copy(init_string); + SecureString secret_from_string(std::move(init_string_copy)); + + // move-assigning from a string securely clears that string + // the earlier value of the secure string is securely cleared + for (const auto& string : strings) { + auto string_copy = std::string(string); + auto old_string_copy_area = StringArea(string_copy); + ASSERT_FALSE(string.empty()); + ASSERT_FALSE(string_copy.empty()); + auto old_secret_from_string_area = secret_from_string.as_view(); + auto old_secret_from_string_value = std::string(secret_from_string.as_view()); + + secret_from_string = std::move(string_copy); + + ASSERT_FALSE(string.empty()); + ASSERT_TRUE(string_copy.empty()); + ASSERT_TRUE(IsSecurelyCleared(string_copy)); + auto secret_from_string_view = secret_from_string.as_view(); + // the secure string can reuse the string_copy's string buffer after assignment + // then, string_copy's string buffer is obviously not cleared + if (secret_from_string_view.data() != old_string_copy_area.data()) { + ASSERT_TRUE(IsSecurelyCleared(old_string_copy_area, string)); + } + ASSERT_FALSE(secret_from_string.empty()); + ASSERT_EQ(secret_from_string.size(), string.size()); + ASSERT_EQ(secret_from_string.length(), string.length()); + ASSERT_EQ(secret_from_string_view, string); + if (secret_from_string_view.data() == old_secret_from_string_area.data()) { + // when secure string reuses the buffer, the old value must be cleared + ASSERT_TRUE( + IsSecurelyCleared(old_secret_from_string_area, secret_from_string.size())); + } else { + // when secure string has a new buffer, the old buffer must be cleared + ASSERT_TRUE(IsSecurelyCleared(old_secret_from_string_area, + old_secret_from_string_value)); + } + } + } + + { + // an initialized secure string + std::string init_string_copy(init_string); + SecureString secret_from_move_secret(std::move(init_string_copy)); + + // move-assigning from a secure string securely clears that secure string + // the earlier value of the secure string is securely cleared + for (const auto& string : strings) { + auto string_copy = std::string(string); + SecureString secret_string(std::move(string_copy)); + ASSERT_FALSE(string.empty()); + ASSERT_TRUE(string_copy.empty()); + ASSERT_FALSE(secret_string.empty()); + auto old_secret_string_area = secret_string.as_view(); + auto old_secret_string_value = std::string(secret_string.as_view()); + auto old_secret_from_move_secret_area = secret_from_move_secret.as_view(); + auto old_secret_from_move_secret_value = + std::string(secret_from_move_secret.as_view()); + + secret_from_move_secret = std::move(secret_string); + + ASSERT_TRUE(secret_string.empty()); + auto secret_from_move_secret_view = secret_from_move_secret.as_view(); + // the secure string can reuse the string_copy's string buffer 
after assignment + // then, string_copy's string buffer is obviously not cleared + if (old_secret_string_area.data() != secret_from_move_secret_view.data()) { + ASSERT_TRUE(IsSecurelyCleared(old_secret_string_area, + old_secret_from_move_secret_value)); + } + ASSERT_FALSE(secret_from_move_secret.empty()); + ASSERT_EQ(secret_from_move_secret.size(), string.size()); + ASSERT_EQ(secret_from_move_secret.length(), string.length()); + ASSERT_EQ(secret_from_move_secret_view, string); + if (old_secret_from_move_secret_area.data() == + secret_from_move_secret_view.data()) { + // when secure string reuses the buffer, the old value must be cleared + ASSERT_TRUE(IsSecurelyCleared(old_secret_from_move_secret_area, + secret_from_move_secret.size())); + } else { + // when secure string has a new buffer, the old buffer must be cleared + ASSERT_TRUE(IsSecurelyCleared(old_secret_from_move_secret_area, + old_secret_from_move_secret_value)); + } + } + } + + { + // an initialized secure string + std::string init_string_copy(init_string); + SecureString secret_from_copy_secret(std::move(init_string_copy)); + + // copy-assigning from a secure string does not modify that secure string + // the earlier value of the secure string is securely cleared + for (const auto& string : strings) { + auto string_copy = std::string(string); + SecureString secret_string(std::move(string_copy)); + ASSERT_FALSE(string.empty()); + ASSERT_TRUE(string_copy.empty()); + ASSERT_FALSE(secret_string.empty()); + auto old_secret_from_copy_secret_area = secret_from_copy_secret.as_view(); + auto old_secret_from_copy_secret_value = + std::string(secret_from_copy_secret.as_view()); + + secret_from_copy_secret = secret_string; + + ASSERT_FALSE(secret_string.empty()); + ASSERT_FALSE(secret_from_copy_secret.empty()); + ASSERT_EQ(secret_from_copy_secret.size(), string.size()); + ASSERT_EQ(secret_from_copy_secret.length(), string.length()); + ASSERT_EQ(secret_from_copy_secret.as_view(), string); + if (old_secret_from_copy_secret_area.data() == + secret_from_copy_secret.as_view().data()) { + // when secure string reuses the buffer, the old value must be cleared + ASSERT_TRUE(IsSecurelyCleared(old_secret_from_copy_secret_area, + secret_from_copy_secret.size())); + } else { + // when secure string has a new buffer, the old buffer must be cleared + ASSERT_TRUE(IsSecurelyCleared(old_secret_from_copy_secret_area, + old_secret_from_copy_secret_value)); + } + } + } + } +} + +TEST(TestSecureString, Deconstruct) { +#if !CAN_TEST_DEALLOCATED_AREAS + GTEST_SKIP() << "Test accesses deallocated memory"; +#else + // We use a very short and a very long string as memory management of short and long + // strings behaves differently. 
+ std::vector strings = {"short secret", std::string(1024, 'x')}; + + for (auto& string : strings) { + auto old_string_value = string; + std::string_view view; + { + // construct secret + auto secret = SecureString(std::move(string)); + // memorize view + view = secret.as_view(); + // deconstruct secret on leaving this context + } + // assert secret memory is cleared on deconstruction + ASSERT_TRUE(IsSecurelyCleared(view, old_string_value)); + // so is the string (tested more thoroughly elsewhere) + ASSERT_TRUE(IsSecurelyCleared(string)); + } +#endif +} + +TEST(TestSecureString, Compare) { + ASSERT_TRUE(SecureString("") == SecureString("")); + ASSERT_FALSE(SecureString("") != SecureString("")); + + ASSERT_TRUE(SecureString("hello world") == SecureString("hello world")); + ASSERT_FALSE(SecureString("hello world") != SecureString("hello world")); + + ASSERT_FALSE(SecureString("hello world") == SecureString("hello worlds")); + ASSERT_TRUE(SecureString("hello world") != SecureString("hello worlds")); +} + +TEST(TestSecureString, Cardinality) { + ASSERT_TRUE(SecureString("").empty()); + ASSERT_EQ(SecureString("").size(), 0); + ASSERT_EQ(SecureString("").length(), 0); + + ASSERT_FALSE(SecureString("hello world").empty()); + ASSERT_EQ(SecureString("hello world").size(), 11); + ASSERT_EQ(SecureString("hello world").length(), 11); +} + +TEST(TestSecureString, AsSpan) { + SecureString secret("hello world"); + const SecureString& const_secret(secret); + auto const_span = const_secret.as_span(); + auto mutable_span = secret.as_span(); + + std::string expected = "hello world"; + span expected_span = {reinterpret_cast(expected.data()), expected.size()}; + ASSERT_EQ(const_span, expected_span); + ASSERT_EQ(mutable_span, expected_span); + + // modify secret through mutual span + // the const span shares the same secret, so it is changed as well + mutable_span[0] = 'H'; + expected_span[0] = 'H'; + ASSERT_EQ(const_span, expected_span); + ASSERT_EQ(mutable_span, expected_span); +} + +TEST(TestSecureString, AsView) { + const SecureString secret = SecureString("hello world"); + const std::string_view view = secret.as_view(); + ASSERT_EQ(view, "hello world"); +} + +#undef CAN_TEST_DEALLOCATED_AREAS + +} // namespace arrow::util::test From 292e98695876ba25998488f8e8bf28bb9d760dfe Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Mon, 9 Jun 2025 17:28:47 +0200 Subject: [PATCH 30/63] GH-46704: [C++] Fix OSS-Fuzz build failure (#46706) ### Rationale for this change PR #46408 included a typo that changed list-view IPC tests to use the same data as list tests. This was detected as a duplicate corpus file by the OSS-Fuzz CI build. ### What changes are included in this PR? Undo mistake that led to using the same test data for lists and list-views. Also fix a regression in the CUDA tests, due to reading non-CPU memory when fetching the first offset in a list/binary array. ### Are these changes tested? Yes. ### Are there any user-facing changes? No. 
* GitHub Issue: #46704 Authored-by: Antoine Pitrou Signed-off-by: Antoine Pitrou --- cpp/build-support/fuzzing/pack_corpus.py | 10 ++++++---- cpp/src/arrow/ipc/test_common.cc | 2 +- cpp/src/arrow/ipc/writer.cc | 18 +++++++++++++++--- 3 files changed, 22 insertions(+), 8 deletions(-) diff --git a/cpp/build-support/fuzzing/pack_corpus.py b/cpp/build-support/fuzzing/pack_corpus.py index 07fc09f9026..94d9a88b387 100755 --- a/cpp/build-support/fuzzing/pack_corpus.py +++ b/cpp/build-support/fuzzing/pack_corpus.py @@ -27,7 +27,7 @@ def process_dir(corpus_dir, zip_output): - seen = set() + seen_hashes = {} for child in corpus_dir.iterdir(): if not child.is_file(): @@ -35,10 +35,12 @@ def process_dir(corpus_dir, zip_output): with child.open('rb') as f: data = f.read() arcname = hashlib.sha1(data).hexdigest() - if arcname in seen: - raise ValueError(f"Duplicate hash: {arcname} (in file {child})") + if arcname in seen_hashes: + raise ValueError( + f"Duplicate hash: {arcname} (in file {child}), " + f"already seen in file {seen_hashes[arcname]}") zip_output.writestr(str(arcname), data) - seen.add(arcname) + seen_hashes[arcname] = child def main(corpus_dir, zip_output_name): diff --git a/cpp/src/arrow/ipc/test_common.cc b/cpp/src/arrow/ipc/test_common.cc index 46060a0db10..a739990fc93 100644 --- a/cpp/src/arrow/ipc/test_common.cc +++ b/cpp/src/arrow/ipc/test_common.cc @@ -474,7 +474,7 @@ Status MakeListViewRecordBatchSized(const int length, std::shared_ptr* out) { - return MakeListRecordBatchSized(200, out); + return MakeListViewRecordBatchSized(200, out); } Status MakeFixedSizeListRecordBatch(std::shared_ptr* out) { diff --git a/cpp/src/arrow/ipc/writer.cc b/cpp/src/arrow/ipc/writer.cc index 8b7d943fc71..4238ecbf3a0 100644 --- a/cpp/src/arrow/ipc/writer.cc +++ b/cpp/src/arrow/ipc/writer.cc @@ -329,15 +329,24 @@ class RecordBatchSerializer { return Status::OK(); } - int64_t required_bytes = sizeof(offset_type) * (array.length() + 1); - if (array.value_offset(0) > 0) { + const int64_t required_bytes = sizeof(offset_type) * (array.length() + 1); + + offset_type first_offset = 0; + RETURN_NOT_OK(MemoryManager::CopyBufferSliceToCPU( + array.data()->buffers[1], array.offset() * sizeof(offset_type), + sizeof(offset_type), reinterpret_cast(&first_offset))); + + if (first_offset > 0) { // If the offset of the first value is non-zero, then we must create a new // offsets buffer with shifted offsets. + if (!array.data()->buffers[1]->is_cpu()) { + return Status::NotImplemented("Rebasing non-CPU offsets"); + } ARROW_ASSIGN_OR_RAISE(auto shifted_offsets, AllocateBuffer(required_bytes, options_.memory_pool)); - auto dest_offsets = shifted_offsets->mutable_span_as(); const offset_type* source_offsets = array.raw_value_offsets(); + auto dest_offsets = shifted_offsets->mutable_span_as(); const offset_type start_offset = source_offsets[0]; for (int i = 0; i <= array.length(); ++i) { @@ -369,6 +378,9 @@ class RecordBatchSerializer { // If we have a non-zero offset, it's likely that the smallest offset is // not zero. We must a) create a new offsets array with shifted offsets and // b) slice the values array accordingly. 
+ if (!array.data()->buffers[1]->is_cpu()) { + return Status::NotImplemented("Rebasing non-CPU list view offsets"); + } ARROW_ASSIGN_OR_RAISE(auto shifted_offsets, AllocateBuffer(required_bytes, options_.memory_pool)); From f8a81c5bc87afe44823d79cc15303f5e2bc3ec7d Mon Sep 17 00:00:00 2001 From: David Sherrier Date: Mon, 9 Jun 2025 16:58:55 +0100 Subject: [PATCH 31/63] GH-46403: [C++] Add support for limiting element size when printing data (#46536) ### Rationale for this change #46403 ### What changes are included in this PR? A new PrettyPrinter option is added to limit elements to 100 characters by default. ### Are these changes tested? Yes ### Are there any user-facing changes? Yes, the default length for outputted elements when stringifying them is now different so if a user was relying on ToString of an array with large elements that result may now be changed. * GitHub Issue: #46403 Lead-authored-by: David Sherrier Co-authored-by: Antoine Pitrou Signed-off-by: Antoine Pitrou --- cpp/src/arrow/pretty_print.cc | 31 +++++++++++--- cpp/src/arrow/pretty_print.h | 8 +++- cpp/src/arrow/pretty_print_test.cc | 60 +++++++++++++++++++++------- python/pyarrow/array.pxi | 6 ++- python/pyarrow/includes/libarrow.pxd | 1 + python/pyarrow/table.pxi | 6 ++- python/pyarrow/tests/test_array.py | 23 +++++++++-- python/pyarrow/tests/test_schema.py | 7 ++-- python/pyarrow/types.pxi | 5 ++- 9 files changed, 115 insertions(+), 32 deletions(-) diff --git a/cpp/src/arrow/pretty_print.cc b/cpp/src/arrow/pretty_print.cc index c5905d0c8c5..807498b6bfe 100644 --- a/cpp/src/arrow/pretty_print.cc +++ b/cpp/src/arrow/pretty_print.cc @@ -59,7 +59,9 @@ class PrettyPrinter { : options_(options), indent_(options.indent), sink_(sink) {} inline void Write(std::string_view data); + inline void Write(std::string_view data, int max_chars); inline void WriteIndented(std::string_view data); + inline void WriteIndented(std::string_view data, int max_chars); inline void Newline(); inline void Indent(); inline void IndentAfterNewline(); @@ -104,11 +106,26 @@ void PrettyPrinter::CloseArray(const Array& array) { (*sink_) << options_.array_delimiters.close; } -void PrettyPrinter::Write(std::string_view data) { (*sink_) << data; } +void PrettyPrinter::Write(std::string_view data) { + Write(data, options_.element_size_limit); +} + +void PrettyPrinter::Write(std::string_view data, int max_chars) { + (*sink_) << data.substr(0, max_chars); + if (data.size() > static_cast(max_chars)) { + (*sink_) << " (... 
" << data.size() - static_cast(max_chars) + << " chars omitted)"; + } +} void PrettyPrinter::WriteIndented(std::string_view data) { Indent(); - Write(data); + Write(data, options_.element_size_limit); +} + +void PrettyPrinter::WriteIndented(std::string_view data, int max_chars) { + Indent(); + Write(data, max_chars); } void PrettyPrinter::Newline() { @@ -176,7 +193,7 @@ class ArrayPrinter : public PrettyPrinter { template Status WritePrimitiveValues(const ArrayType& array, Formatter* formatter) { - auto appender = [&](std::string_view v) { (*sink_) << v; }; + auto appender = [&](std::string_view v) { Write(v); }; auto format_func = [&](int64_t i) { (*formatter)(array.GetView(i), appender); return Status::OK(); @@ -232,9 +249,11 @@ class ArrayPrinter : public PrettyPrinter { enable_if_has_string_view WriteDataValues(const ArrayType& array) { return WriteValues(array, [&](int64_t i) { if constexpr (T::is_utf8) { - (*sink_) << "\"" << array.GetView(i) << "\""; + (*sink_) << "\""; + this->Write(array.GetView(i), options_.element_size_limit - 2); + (*sink_) << "\""; } else { - (*sink_) << HexEncode(array.GetView(i)); + this->Write(HexEncode(array.GetView(i))); } return Status::OK(); }); @@ -243,7 +262,7 @@ class ArrayPrinter : public PrettyPrinter { template enable_if_decimal WriteDataValues(const ArrayType& array) { return WriteValues(array, [&](int64_t i) { - (*sink_) << array.FormatValue(i); + this->Write(array.FormatValue(i)); return Status::OK(); }); } diff --git a/cpp/src/arrow/pretty_print.h b/cpp/src/arrow/pretty_print.h index ad68726716c..7e5eca4300b 100644 --- a/cpp/src/arrow/pretty_print.h +++ b/cpp/src/arrow/pretty_print.h @@ -58,14 +58,15 @@ struct ARROW_EXPORT PrettyPrintOptions { PrettyPrintOptions(int indent, // NOLINT runtime/explicit int window = 10, int indent_size = 2, std::string null_rep = "null", bool skip_new_lines = false, bool truncate_metadata = true, - int container_window = 2) + int container_window = 2, int element_size_limit = 100) : indent(indent), indent_size(indent_size), window(window), container_window(container_window), null_rep(std::move(null_rep)), skip_new_lines(skip_new_lines), - truncate_metadata(truncate_metadata) {} + truncate_metadata(truncate_metadata), + element_size_limit(element_size_limit) {} /// Create a PrettyPrintOptions instance with default values static PrettyPrintOptions Defaults() { return PrettyPrintOptions(); } @@ -99,6 +100,9 @@ struct ARROW_EXPORT PrettyPrintOptions { /// If true, display schema metadata when pretty-printing a Schema bool show_schema_metadata = true; + /// Limit each element to specified number of characters, defaults to 100 + int element_size_limit = 100; + /// Delimiters to use when printing an Array PrettyPrintDelimiters array_delimiters = PrettyPrintDelimiters::Defaults(); diff --git a/cpp/src/arrow/pretty_print_test.cc b/cpp/src/arrow/pretty_print_test.cc index 108b212cca5..0dfe3c9db3e 100644 --- a/cpp/src/arrow/pretty_print_test.cc +++ b/cpp/src/arrow/pretty_print_test.cc @@ -25,6 +25,7 @@ #include #include #include +#include #include #include "arrow/array.h" @@ -47,37 +48,37 @@ class TestPrettyPrint : public ::testing::Test { }; template -void CheckStream(const T& obj, const PrettyPrintOptions& options, const char* expected) { +void CheckStream(const T& obj, const PrettyPrintOptions& options, + std::string_view expected) { std::ostringstream sink; ASSERT_OK(PrettyPrint(obj, options, &sink)); std::string result = sink.str(); - ASSERT_EQ(std::string(expected, strlen(expected)), result); + ASSERT_EQ(expected, 
result); } -void CheckArray(const Array& arr, const PrettyPrintOptions& options, const char* expected, - bool check_operator = true) { +void CheckArray(const Array& arr, const PrettyPrintOptions& options, + std::string_view expected, bool check_operator = true) { ARROW_SCOPED_TRACE("For datatype: ", arr.type()->ToString()); CheckStream(arr, options, expected); - if (options.indent == 0 && check_operator) { + if (options.indent == 0 && options.element_size_limit == 100 && check_operator) { std::stringstream ss; ss << arr; - std::string result = std::string(expected, strlen(expected)); - ASSERT_EQ(result, ss.str()); + ASSERT_EQ(expected, ss.str()); } } template -void Check(const T& obj, const PrettyPrintOptions& options, const char* expected) { +void Check(const T& obj, const PrettyPrintOptions& options, std::string_view expected) { std::string result; ASSERT_OK(PrettyPrint(obj, options, &result)); - ASSERT_EQ(std::string(expected, strlen(expected)), result); + ASSERT_EQ(expected, result); } template void CheckPrimitive(const std::shared_ptr& type, const PrettyPrintOptions& options, const std::vector& is_valid, - const std::vector& values, const char* expected, + const std::vector& values, std::string_view expected, bool check_operator = true) { std::shared_ptr array; ArrayFromVector(type, is_valid, values, &array); @@ -86,7 +87,7 @@ void CheckPrimitive(const std::shared_ptr& type, template void CheckPrimitive(const PrettyPrintOptions& options, const std::vector& is_valid, - const std::vector& values, const char* expected, + const std::vector& values, std::string_view expected, bool check_operator = true) { CheckPrimitive(TypeTraits::type_singleton(), options, is_valid, values, expected, check_operator); @@ -158,12 +159,12 @@ TEST_F(TestPrettyPrint, PrimitiveType) { ])expected"; CheckPrimitive({2, 10}, is_valid, values2, ex2_in2); - std::vector values3 = {"foo", "bar", "", "baz", ""}; + std::vector values3 = {"foo", "bar", "", "a longer string", ""}; static const char* ex3 = R"expected([ "foo", "bar", null, - "baz", + "a longer string", null ])expected"; CheckPrimitive({0, 10}, is_valid, values3, ex3); @@ -172,11 +173,23 @@ TEST_F(TestPrettyPrint, PrimitiveType) { "foo", "bar", null, - "baz", + "a longer string", null ])expected"; CheckPrimitive({2, 10}, is_valid, values3, ex3_in2); CheckPrimitive({2, 10}, is_valid, values3, ex3_in2); + + PrettyPrintOptions options{2, 10}; + options.element_size_limit = 8; + static const char* ex3_in3 = R"expected( [ + "foo", + "bar", + null, + "a long (... 9 chars omitted)", + null + ])expected"; + CheckPrimitive(options, is_valid, values3, ex3_in3); + CheckPrimitive(options, is_valid, values3, ex3_in3); } TEST_F(TestPrettyPrint, PrimitiveTypeNoNewlines) { @@ -772,6 +785,12 @@ TEST_F(TestPrettyPrint, BinaryNoNewlines) { options.window = 2; expected = "[666F6F,626172,...,,FF]"; CheckPrimitive(options, is_valid, values, expected, false); + + // With truncated element size + options.element_size_limit = 1; + expected = + "[6 (... 5 chars omitted),6 (... 5 chars omitted),...,,F (... 1 chars omitted)]"; + CheckPrimitive(options, is_valid, values, expected, false); } template @@ -1103,6 +1122,12 @@ TEST_F(TestPrettyPrint, FixedSizeBinaryType) { CheckArray(*array, {0, 10}, ex); static const char* ex_2 = " [\n 666F6F,\n ...\n 62617A\n ]"; CheckArray(*array, {2, 1}, ex_2); + + auto options = PrettyPrintOptions{2, 1}; + options.element_size_limit = 3; + static const char* ex_3 = + " [\n 666 (... 3 chars omitted),\n ...\n 626 (... 
3 chars omitted)\n ]"; + CheckArray(*array, options, ex_3); } TEST_F(TestPrettyPrint, DecimalTypes) { @@ -1115,6 +1140,12 @@ TEST_F(TestPrettyPrint, DecimalTypes) { static const char* ex = "[\n 123.4567,\n 456.7891,\n null\n]"; CheckArray(*array, {0}, ex); + + auto options = PrettyPrintOptions(); + options.element_size_limit = 3; + static const char* ex_2 = + "[\n 123 (... 5 chars omitted),\n 456 (... 5 chars omitted),\n null\n]"; + CheckArray(*array, options, ex_2); } } @@ -1417,6 +1448,7 @@ lorem: 'Lorem ipsum dolor sit amet, consectetur adipiscing elit. Nulla accumsan sapien commodo massa, vel volutpat orci nisi eu justo. Nulla non blandit sapien. Quisque pretium vestibulum urna eu vehicula.')"; options.truncate_metadata = false; + options.element_size_limit = 10000; Check(*my_schema, options, expected_verbose); // Metadata that exactly fits diff --git a/python/pyarrow/array.pxi b/python/pyarrow/array.pxi index 72856bf1ecc..036c68486e5 100644 --- a/python/pyarrow/array.pxi +++ b/python/pyarrow/array.pxi @@ -1357,7 +1357,8 @@ cdef class Array(_PandasConvertible): return f'{type_format}\n{self}' def to_string(self, *, int indent=2, int top_level_indent=0, int window=10, - int container_window=2, c_bool skip_new_lines=False): + int container_window=2, c_bool skip_new_lines=False, + int element_size_limit=100): """ Render a "pretty-printed" string representation of the Array. @@ -1383,6 +1384,8 @@ cdef class Array(_PandasConvertible): skip_new_lines : bool If the array should be rendered as a single line of text or if each element should be on its own line. + element_size_limit : int, default 100 + Maximum number of characters of a single element before it is truncated. """ cdef: c_string result @@ -1392,6 +1395,7 @@ cdef class Array(_PandasConvertible): options = PrettyPrintOptions(top_level_indent, window) options.skip_new_lines = skip_new_lines options.indent_size = indent + options.element_size_limit = element_size_limit check_status( PrettyPrint( deref(self.ap), diff --git a/python/pyarrow/includes/libarrow.pxd b/python/pyarrow/includes/libarrow.pxd index b7a55c6219b..8c45494f94c 100644 --- a/python/pyarrow/includes/libarrow.pxd +++ b/python/pyarrow/includes/libarrow.pxd @@ -652,6 +652,7 @@ cdef extern from "arrow/api.h" namespace "arrow" nogil: c_bool truncate_metadata c_bool show_field_metadata c_bool show_schema_metadata + int element_size_limit @staticmethod PrettyPrintOptions Defaults() diff --git a/python/pyarrow/table.pxi b/python/pyarrow/table.pxi index e40eee7e5ee..b7ad4c4b953 100644 --- a/python/pyarrow/table.pxi +++ b/python/pyarrow/table.pxi @@ -1,3 +1,4 @@ + # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information @@ -116,7 +117,7 @@ cdef class ChunkedArray(_PandasConvertible): return f"{type_format}\n{self}" def to_string(self, *, int indent=0, int window=5, int container_window=2, - c_bool skip_new_lines=False): + c_bool skip_new_lines=False, int element_size_limit=100): """ Render a "pretty-printed" string representation of the ChunkedArray @@ -137,6 +138,8 @@ cdef class ChunkedArray(_PandasConvertible): skip_new_lines : bool If the array should be rendered as a single line of text or if each element should be on its own line. + element_size_limit : int, default 100 + Maximum number of characters of a single element before it is truncated. 
Examples -------- @@ -153,6 +156,7 @@ cdef class ChunkedArray(_PandasConvertible): options = PrettyPrintOptions(indent, window) options.skip_new_lines = skip_new_lines options.container_window = container_window + options.element_size_limit = element_size_limit check_status( PrettyPrint( deref(self.chunked_array), diff --git a/python/pyarrow/tests/test_array.py b/python/pyarrow/tests/test_array.py index 1a964cda6c0..7d72e953c85 100644 --- a/python/pyarrow/tests/test_array.py +++ b/python/pyarrow/tests/test_array.py @@ -75,17 +75,32 @@ def test_constructor_raises(): def test_list_format(): - arr = pa.array([[1], None, [2, 3, None]]) + arr = pa.array([["foo"], None, ["bar", "a longer string", None]]) result = arr.to_string() expected = """\ [ [ - 1 + "foo" ], null, [ - 2, - 3, + "bar", + "a longer string", + null + ] +]""" + assert result == expected + + result = arr.to_string(element_size_limit=10) + expected = """\ +[ + [ + "foo" + ], + null, + [ + "bar", + "a longer (... 7 chars omitted)", null ] ]""" diff --git a/python/pyarrow/tests/test_schema.py b/python/pyarrow/tests/test_schema.py index ee4c2eba7f9..a1197ed2d08 100644 --- a/python/pyarrow/tests/test_schema.py +++ b/python/pyarrow/tests/test_schema.py @@ -321,14 +321,15 @@ def test_schema_to_string_with_metadata(): -- field metadata -- key3: 'value3' -- schema metadata -- -lorem: '{lorem}'""" +lorem: '{lorem[:92]} (... {len(lorem) - 91} chars omitted)""" assert my_schema.to_string(truncate_metadata=False, - show_field_metadata=False) == f"""\ + show_field_metadata=False, + element_size_limit=50) == f"""\ foo: int32 not null bar: string -- schema metadata -- -lorem: '{lorem}'""" +lorem: '{lorem[:50 - 8]} (... {len(lorem) - (50 - 9)} chars omitted)""" assert my_schema.to_string(truncate_metadata=False, show_schema_metadata=False) == """\ diff --git a/python/pyarrow/types.pxi b/python/pyarrow/types.pxi index fcd150725a3..d771dc8b684 100644 --- a/python/pyarrow/types.pxi +++ b/python/pyarrow/types.pxi @@ -3566,7 +3566,7 @@ cdef class Schema(_Weakrefable): return pyarrow_wrap_schema(new_schema) def to_string(self, truncate_metadata=True, show_field_metadata=True, - show_schema_metadata=True): + show_schema_metadata=True, element_size_limit=100): """ Return human-readable representation of Schema @@ -3579,6 +3579,8 @@ cdef class Schema(_Weakrefable): Display Field-level KeyValueMetadata show_schema_metadata : boolean, default True Display Schema-level KeyValueMetadata + element_size_limit : int, default 100 + Maximum number of characters of a single element before it is truncated. Returns ------- @@ -3592,6 +3594,7 @@ cdef class Schema(_Weakrefable): options.truncate_metadata = truncate_metadata options.show_field_metadata = show_field_metadata options.show_schema_metadata = show_schema_metadata + options.element_size_limit = element_size_limit with nogil: check_status( From 3418c62a7dda7ada4a4209e0216cd0c9278c7889 Mon Sep 17 00:00:00 2001 From: Sutou Kouhei Date: Tue, 10 Jun 2025 05:32:19 +0900 Subject: [PATCH 32/63] GH-46736: [CI] Disable Parquet in conan-minimum (#46744) ### Rationale for this change #45459 introduced RapidJSON dependency to Parquet support. Conan recipe enables Parquet by default but it doesn't enable RapidJSON by default. So we can't find RapidJSON. ### What changes are included in this PR? Disable Parquet by default. We should report "Parquet support requires RapidJSON support" to Conan when we release 21.0.0. ### Are these changes tested? Yes. ### Are there any user-facing changes? No. 
* GitHub Issue: #46736 Authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- ci/scripts/conan_build.sh | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/ci/scripts/conan_build.sh b/ci/scripts/conan_build.sh index 03e5cab8426..ed8aea428ee 100755 --- a/ci/scripts/conan_build.sh +++ b/ci/scripts/conan_build.sh @@ -30,8 +30,11 @@ conan_args=() conan_args+=(--build=missing) if [ -n "${ARROW_CONAN_PARQUET:-}" ]; then conan_args+=(--options arrow/*:parquet=${ARROW_CONAN_PARQUET}) - conan_args+=(--options arrow/*:with_thrift=${ARROW_CONAN_PARQUET}) conan_args+=(--options arrow/*:with_boost=${ARROW_CONAN_PARQUET}) + conan_args+=(--options arrow/*:with_json=${ARROW_CONAN_PARQUET}) + conan_args+=(--options arrow/*:with_thrift=${ARROW_CONAN_PARQUET}) +else + conan_args+=(--options arrow/*:parquet=False) fi if [ -n "${ARROW_CONAN_WITH_BROTLI:-}" ]; then conan_args+=(--options arrow/*:with_brotli=${ARROW_CONAN_WITH_BROTLI}) From c930e5766b2e7095f0a9842766d2ecd4e8e09bcb Mon Sep 17 00:00:00 2001 From: "dependabot[bot]" <49699333+dependabot[bot]@users.noreply.github.com> Date: Tue, 10 Jun 2025 08:39:52 +0900 Subject: [PATCH 33/63] MINOR: [CI] Bump conda-incubator/setup-miniconda from 3.1.1 to 3.2.0 (#46751) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Bumps [conda-incubator/setup-miniconda](https://github.com/conda-incubator/setup-miniconda) from 3.1.1 to 3.2.0.
Release notes and changelog

Sourced from conda-incubator/setup-miniconda's releases and its CHANGELOG.

[v3.2.0] (2025-06-04): fixes, plus tasks and maintenance.
v3.1.1 (2025-01-20)

Fixes

  • #378: Make nodefaults warning more explicit
  • #387: Detect and support Linux ARM runners for both Miniconda and Miniforge

Tasks and Maintenance

  • #374: Bump conda-incubator/setup-miniconda from 3.0.4 to 3.1.0
  • #375: Bump actions/cache from 3 to 4
  • #384: Bump @actions/tool-cache from 2.0.1 to 2.0.2
  • #386: Fix link to example 14
  • #388: Fix mamba 1.x examples


[v3.1.0] (2024-10-31): Features ... (truncated)

Commits
  • 8352349 Merge pull request #403 from conda-incubator/prepare-3.2.0
  • 07746af Format via npm run prettier:format
  • b0c4eb8 Update CHANGELOG.md
  • 5dc2927 Bump semver from 7.7.1 to 7.7.2 (#400)
  • 91c4acc Merge pull request #402 from conda-incubator/auto-activate-base
  • 0b3329e Workaround for auto_activate_base deprecation
  • 7470d07 Merge pull request #399 from conda-incubator/dependabot/npm_and_yarn/main/mul...
  • 598ddbc Commit result of npm run build
  • f0b2634 Bump semver and @types/semver
  • e497204 Bump undici from 5.28.4 to 5.28.5 (#390)
  • Additional commits viewable in compare view

Authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com> Signed-off-by: Sutou Kouhei --- .github/workflows/verify_rc.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/verify_rc.yml b/.github/workflows/verify_rc.yml index 260640b4273..7a9561c5a54 100644 --- a/.github/workflows/verify_rc.yml +++ b/.github/workflows/verify_rc.yml @@ -198,7 +198,7 @@ jobs: - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 with: submodules: recursive - - uses: conda-incubator/setup-miniconda@505e6394dae86d6a5c7fbb6e3fb8938e3e863830 # v3.1.1 + - uses: conda-incubator/setup-miniconda@835234971496cad1653abb28a638a281cf32541f # v3.2.0 - name: Install System Dependencies run: | choco install --no-progress --yes boost-msvc-14.1 From 4bfdb81996dc18e6f2cc564e57555598e3e7ff9a Mon Sep 17 00:00:00 2001 From: Hiroyuki Sato Date: Tue, 10 Jun 2025 14:43:52 +0900 Subject: [PATCH 34/63] GH-46726: [CI][Dev] fix shellcheck errors in the ci/scripts/conan_build.sh (#46727) ### Rationale for this change This is the sub issue #44748. * SC2046 -- Quote this to prevent word splitt... * SC2206 -- Quote to prevent word splitting/g... * SC2086 -- Double quote to prevent globbing ... ``` In ci/scripts/conan_build.sh line 32: conan_args+=(--options arrow/*:parquet=${ARROW_CONAN_PARQUET}) ^--------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 33: conan_args+=(--options arrow/*:with_thrift=${ARROW_CONAN_PARQUET}) ^--------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 34: conan_args+=(--options arrow/*:with_boost=${ARROW_CONAN_PARQUET}) ^--------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 37: conan_args+=(--options arrow/*:with_brotli=${ARROW_CONAN_WITH_BROTLI}) ^------------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 40: conan_args+=(--options arrow/*:with_bz2=${ARROW_CONAN_WITH_BZ2}) ^---------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 43: conan_args+=(--options arrow/*:with_flight_rpc=${ARROW_CONAN_WITH_FLIGHT_RPC}) ^----------------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 44: conan_args+=(--options arrow/*:with_grpc=${ARROW_CONAN_WITH_FLIGHT_RPC}) ^----------------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 45: conan_args+=(--options arrow/*:with_protobuf=${ARROW_CONAN_WITH_FLIGHT_RPC}) ^----------------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 46: conan_args+=(--options arrow/*:with_re2=${ARROW_CONAN_WITH_FLIGHT_RPC}) ^----------------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. 
In ci/scripts/conan_build.sh line 49: conan_args+=(--options arrow/*:with_glog=${ARROW_CONAN_WITH_GLOG}) ^----------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 52: conan_args+=(--options arrow/*:with_jemalloc=${ARROW_CONAN_WITH_JEMALLOC}) ^--------------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 55: conan_args+=(--options arrow/*:with_json=${ARROW_CONAN_WITH_JSON}) ^----------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 58: conan_args+=(--options arrow/*:with_lz4=${ARROW_CONAN_WITH_LZ4}) ^---------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 61: conan_args+=(--options arrow/*:with_snappy=${ARROW_CONAN_WITH_SNAPPY}) ^------------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 64: conan_args+=(--options arrow/*:with_zstd=${ARROW_CONAN_WITH_ZSTD}) ^----------------------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 67: version=$(grep '^set(ARROW_VERSION ' ${ARROW_HOME}/cpp/CMakeLists.txt | \ ^-----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: version=$(grep '^set(ARROW_VERSION ' "${ARROW_HOME}"/cpp/CMakeLists.txt | \ In ci/scripts/conan_build.sh line 69: conan_args+=(--version ${version}) ^--------^ SC2206 (warning): Quote to prevent word splitting/globbing, or split robustly with mapfile or read -a. In ci/scripts/conan_build.sh line 72: rm -rf ${build_dir}/conan || sudo rm -rf ${build_dir}/conan ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: rm -rf "${build_dir}"/conan || sudo rm -rf "${build_dir}"/conan In ci/scripts/conan_build.sh line 73: mkdir -p ${build_dir}/conan || sudo mkdir -p ${build_dir}/conan ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: mkdir -p "${build_dir}"/conan || sudo mkdir -p "${build_dir}"/conan In ci/scripts/conan_build.sh line 74: if [ -w ${build_dir} ]; then ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: if [ -w "${build_dir}" ]; then In ci/scripts/conan_build.sh line 75: cp -a ${source_dir}/ci/conan/* ${build_dir}/conan/ ^-----------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: cp -a "${source_dir}"/ci/conan/* "${build_dir}"/conan/ In ci/scripts/conan_build.sh line 77: sudo cp -a ${source_dir}/ci/conan/* ${build_dir}/conan/ ^-----------^ SC2086 (info): Double quote to prevent globbing and word splitting. ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. 
Did you mean: sudo cp -a "${source_dir}"/ci/conan/* "${build_dir}"/conan/ In ci/scripts/conan_build.sh line 78: sudo chown -R $(id -u):$(id -g) ${build_dir}/conan/ ^------^ SC2046 (warning): Quote this to prevent word splitting. ^------^ SC2046 (warning): Quote this to prevent word splitting. ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: sudo chown -R $(id -u):$(id -g) "${build_dir}"/conan/ In ci/scripts/conan_build.sh line 80: cd ${build_dir}/conan/all ^----------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: cd "${build_dir}"/conan/all For more information: https://www.shellcheck.net/wiki/SC2046 -- Quote this to prevent word splitt... https://www.shellcheck.net/wiki/SC2206 -- Quote to prevent word splitting/g... https://www.shellcheck.net/wiki/SC2086 -- Double quote to prevent globbing ... ``` ### What changes are included in this PR? Add quote like `"${ARROW_CONAN_PARQUET}"` ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46726 Lead-authored-by: Hiroyuki Sato Co-authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- .pre-commit-config.yaml | 1 + ci/scripts/conan_build.sh | 52 +++++++++++++++++++-------------------- 2 files changed, 27 insertions(+), 26 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d70fe523ca9..2d6d24c416b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -299,6 +299,7 @@ repos: ?^c_glib/test/run-test\.sh$| ?^ci/scripts/c_glib_build\.sh$| ?^ci/scripts/c_glib_test\.sh$| + ?^ci/scripts/conan_build\.sh$| ?^ci/scripts/conan_setup\.sh$| ?^ci/scripts/cpp_test\.sh$| ?^ci/scripts/csharp_build\.sh$| diff --git a/ci/scripts/conan_build.sh b/ci/scripts/conan_build.sh index ed8aea428ee..15c73d9d25a 100755 --- a/ci/scripts/conan_build.sh +++ b/ci/scripts/conan_build.sh @@ -29,56 +29,56 @@ export ARROW_HOME=${source_dir} conan_args=() conan_args+=(--build=missing) if [ -n "${ARROW_CONAN_PARQUET:-}" ]; then - conan_args+=(--options arrow/*:parquet=${ARROW_CONAN_PARQUET}) - conan_args+=(--options arrow/*:with_boost=${ARROW_CONAN_PARQUET}) - conan_args+=(--options arrow/*:with_json=${ARROW_CONAN_PARQUET}) - conan_args+=(--options arrow/*:with_thrift=${ARROW_CONAN_PARQUET}) + conan_args+=(--options "arrow/*:parquet=${ARROW_CONAN_PARQUET}") + conan_args+=(--options "arrow/*:with_boost=${ARROW_CONAN_PARQUET}") + conan_args+=(--options "arrow/*:with_json=${ARROW_CONAN_PARQUET}") + conan_args+=(--options "arrow/*:with_thrift=${ARROW_CONAN_PARQUET}") else - conan_args+=(--options arrow/*:parquet=False) + conan_args+=(--options "arrow/*:parquet=False") fi if [ -n "${ARROW_CONAN_WITH_BROTLI:-}" ]; then - conan_args+=(--options arrow/*:with_brotli=${ARROW_CONAN_WITH_BROTLI}) + conan_args+=(--options "arrow/*:with_brotli=${ARROW_CONAN_WITH_BROTLI}") fi if [ -n "${ARROW_CONAN_WITH_BZ2:-}" ]; then - conan_args+=(--options arrow/*:with_bz2=${ARROW_CONAN_WITH_BZ2}) + conan_args+=(--options "arrow/*:with_bz2=${ARROW_CONAN_WITH_BZ2}") fi if [ -n "${ARROW_CONAN_WITH_FLIGHT_RPC:-}" ]; then - conan_args+=(--options arrow/*:with_flight_rpc=${ARROW_CONAN_WITH_FLIGHT_RPC}) - conan_args+=(--options arrow/*:with_grpc=${ARROW_CONAN_WITH_FLIGHT_RPC}) - conan_args+=(--options arrow/*:with_protobuf=${ARROW_CONAN_WITH_FLIGHT_RPC}) - conan_args+=(--options arrow/*:with_re2=${ARROW_CONAN_WITH_FLIGHT_RPC}) + conan_args+=(--options "arrow/*:with_flight_rpc=${ARROW_CONAN_WITH_FLIGHT_RPC}") + conan_args+=(--options 
"arrow/*:with_grpc=${ARROW_CONAN_WITH_FLIGHT_RPC}") + conan_args+=(--options "arrow/*:with_protobuf=${ARROW_CONAN_WITH_FLIGHT_RPC}") + conan_args+=(--options "arrow/*:with_re2=${ARROW_CONAN_WITH_FLIGHT_RPC}") fi if [ -n "${ARROW_CONAN_WITH_GLOG:-}" ]; then - conan_args+=(--options arrow/*:with_glog=${ARROW_CONAN_WITH_GLOG}) + conan_args+=(--options "arrow/*:with_glog=${ARROW_CONAN_WITH_GLOG}") fi if [ -n "${ARROW_CONAN_WITH_JEMALLOC:-}" ]; then - conan_args+=(--options arrow/*:with_jemalloc=${ARROW_CONAN_WITH_JEMALLOC}) + conan_args+=(--options "arrow/*:with_jemalloc=${ARROW_CONAN_WITH_JEMALLOC}") fi if [ -n "${ARROW_CONAN_WITH_JSON:-}" ]; then - conan_args+=(--options arrow/*:with_json=${ARROW_CONAN_WITH_JSON}) + conan_args+=(--options "arrow/*:with_json=${ARROW_CONAN_WITH_JSON}") fi if [ -n "${ARROW_CONAN_WITH_LZ4:-}" ]; then - conan_args+=(--options arrow/*:with_lz4=${ARROW_CONAN_WITH_LZ4}) + conan_args+=(--options "arrow/*:with_lz4=${ARROW_CONAN_WITH_LZ4}") fi if [ -n "${ARROW_CONAN_WITH_SNAPPY:-}" ]; then - conan_args+=(--options arrow/*:with_snappy=${ARROW_CONAN_WITH_SNAPPY}) + conan_args+=(--options "arrow/*:with_snappy=${ARROW_CONAN_WITH_SNAPPY}") fi if [ -n "${ARROW_CONAN_WITH_ZSTD:-}" ]; then - conan_args+=(--options arrow/*:with_zstd=${ARROW_CONAN_WITH_ZSTD}) + conan_args+=(--options "arrow/*:with_zstd=${ARROW_CONAN_WITH_ZSTD}") fi -version=$(grep '^set(ARROW_VERSION ' ${ARROW_HOME}/cpp/CMakeLists.txt | \ +version=$(grep '^set(ARROW_VERSION ' "${ARROW_HOME}/cpp/CMakeLists.txt" | \ grep -E -o '([0-9.]*)') -conan_args+=(--version ${version}) +conan_args+=(--version "${version}") rm -rf ~/.conan/data/arrow/ -rm -rf ${build_dir}/conan || sudo rm -rf ${build_dir}/conan -mkdir -p ${build_dir}/conan || sudo mkdir -p ${build_dir}/conan -if [ -w ${build_dir} ]; then - cp -a ${source_dir}/ci/conan/* ${build_dir}/conan/ +rm -rf "${build_dir}/conan" || sudo rm -rf "${build_dir}/conan" +mkdir -p "${build_dir}/conan" || sudo mkdir -p "${build_dir}/conan" +if [ -w "${build_dir}" ]; then + cp -a "${source_dir}"/ci/conan/* "${build_dir}/conan/" else - sudo cp -a ${source_dir}/ci/conan/* ${build_dir}/conan/ - sudo chown -R $(id -u):$(id -g) ${build_dir}/conan/ + sudo cp -a "${source_dir}"/ci/conan/* "${build_dir}/conan/" + sudo chown -R "$(id -u):$(id -g)" "${build_dir}/conan/" fi -cd ${build_dir}/conan/all +cd "${build_dir}/conan/all" conan create . "${conan_args[@]}" "$@" From 8ca329800e7c6e828d4bf8d0c39e4b3259ae504c Mon Sep 17 00:00:00 2001 From: "yuri@FreeBSD" Date: Tue, 10 Jun 2025 00:04:03 -0700 Subject: [PATCH 35/63] GH-46761: [C++] Add executable detection on FreeBSD (#46759) ### Rationale for this change `arrow::util::Process::Impl::ResolveCurrentExecutable()` isn't available on FreeBSD: https://github.com/apache/arrow/blob/b630f48f8464e770341e053e2fb328bd857bf72e/cpp/src/arrow/testing/process.cc#L275-L307 ### What changes are included in this PR? Use `/proc/curproc/file` on FreeBSD. ### Are these changes tested? Yes. ### Are there any user-facing changes? No. 
* GitHub Issue: #46761 Authored-by: Yuri Victorovich Signed-off-by: Sutou Kouhei --- cpp/src/arrow/testing/process.cc | 2 ++ 1 file changed, 2 insertions(+) diff --git a/cpp/src/arrow/testing/process.cc b/cpp/src/arrow/testing/process.cc index 45b92af0dde..e32703bd9d9 100644 --- a/cpp/src/arrow/testing/process.cc +++ b/cpp/src/arrow/testing/process.cc @@ -281,6 +281,8 @@ class Process::Impl { # if defined(__linux__) path = filesystem::canonical("/proc/self/exe", error_code); +# elif defined(__FreeBSD__) + path = filesystem::canonical("/proc/curproc/file", error_code); # elif defined(__APPLE__) char buf[PATH_MAX + 1]; uint32_t bufsize = sizeof(buf); From 1c1c8bab33853c478f2c35ebcb9f120ed9c4b7a6 Mon Sep 17 00:00:00 2001 From: Nic Crane Date: Tue, 10 Jun 2025 09:38:34 +0100 Subject: [PATCH 36/63] GH-45653: [Python] Scalar subclasses should implement Python protocols (#45818) ### Rationale for this change Implement dunder methods on Scalar objects ### What changes are included in this PR? * integer scalars implement `__int__` * floating-point scalars implement `__float__` * binary scalars implement [`__bytes__`](https://docs.python.org/3.13/reference/datamodel.html#object.__bytes__) * binary scalars implement the [buffer protocol](https://docs.python.org/3.13/reference/datamodel.html#object.__buffer__) * we explicitly test that Struct scalars implement Sequences * Map scalar implement mapping ### Are these changes tested? Yes ### Are there any user-facing changes? Yes * GitHub Issue: #45653 Lead-authored-by: Nic Crane Co-authored-by: Alenka Frim Co-authored-by: Antoine Pitrou Signed-off-by: Antoine Pitrou --- docs/source/python/compute.rst | 15 +++++ python/pyarrow/scalar.pxi | 90 ++++++++++++++++++++++++++-- python/pyarrow/tests/test_scalars.py | 48 ++++++++++++++- 3 files changed, 144 insertions(+), 9 deletions(-) diff --git a/docs/source/python/compute.rst b/docs/source/python/compute.rst index c2b46c8f3f6..397af9d2c51 100644 --- a/docs/source/python/compute.rst +++ b/docs/source/python/compute.rst @@ -63,6 +63,21 @@ Below are a few simple examples:: >>> pc.multiply(x, y) +If you are using a compute function which returns more than one value, results +will be returned as a ``StructScalar``. You can extract the individual values by +calling the :meth:`pyarrow.StructScalar.values` method:: + + >>> import pyarrow as pa + >>> import pyarrow.compute as pc + >>> a = pa.array([1, 1, 2, 3]) + >>> pc.min_max(a) + + >>> a, b = pc.min_max(a).values() + >>> a + + >>> b + + These functions can do more than just element-by-element operations. 
Here is an example of sorting a table:: diff --git a/python/pyarrow/scalar.pxi b/python/pyarrow/scalar.pxi index a9cdcff4e41..5934a7aa8cf 100644 --- a/python/pyarrow/scalar.pxi +++ b/python/pyarrow/scalar.pxi @@ -18,6 +18,7 @@ import collections import warnings from uuid import UUID +from collections.abc import Sequence, Mapping cdef class Scalar(_Weakrefable): @@ -219,6 +220,8 @@ cdef class BooleanScalar(Scalar): cdef CBooleanScalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __bool__(self): + return self.as_py() or False cdef class UInt8Scalar(Scalar): """ @@ -238,6 +241,9 @@ cdef class UInt8Scalar(Scalar): cdef CUInt8Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class Int8Scalar(Scalar): """ @@ -257,6 +263,9 @@ cdef class Int8Scalar(Scalar): cdef CInt8Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class UInt16Scalar(Scalar): """ @@ -276,6 +285,9 @@ cdef class UInt16Scalar(Scalar): cdef CUInt16Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class Int16Scalar(Scalar): """ @@ -295,6 +307,9 @@ cdef class Int16Scalar(Scalar): cdef CInt16Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class UInt32Scalar(Scalar): """ @@ -314,6 +329,9 @@ cdef class UInt32Scalar(Scalar): cdef CUInt32Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class Int32Scalar(Scalar): """ @@ -333,6 +351,9 @@ cdef class Int32Scalar(Scalar): cdef CInt32Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class UInt64Scalar(Scalar): """ @@ -352,6 +373,9 @@ cdef class UInt64Scalar(Scalar): cdef CUInt64Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class Int64Scalar(Scalar): """ @@ -371,6 +395,9 @@ cdef class Int64Scalar(Scalar): cdef CInt64Scalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __index__(self): + return self.as_py() + cdef class HalfFloatScalar(Scalar): """ @@ -390,6 +417,12 @@ cdef class HalfFloatScalar(Scalar): cdef CHalfFloatScalar* sp = self.wrapped.get() return PyFloat_FromHalf(sp.value) if sp.is_valid else None + def __float__(self): + return self.as_py() + + def __int__(self): + return int(self.as_py()) + cdef class FloatScalar(Scalar): """ @@ -409,6 +442,12 @@ cdef class FloatScalar(Scalar): cdef CFloatScalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __float__(self): + return self.as_py() + + def __int__(self): + return int(float(self)) + cdef class DoubleScalar(Scalar): """ @@ -428,6 +467,12 @@ cdef class DoubleScalar(Scalar): cdef CDoubleScalar* sp = self.wrapped.get() return sp.value if sp.is_valid else None + def __float__(self): + return self.as_py() + + def __int__(self): + return int(float(self)) + cdef class Decimal32Scalar(Scalar): """ @@ -843,6 +888,15 @@ cdef class BinaryScalar(Scalar): buffer = self.as_buffer() return None if buffer is None else buffer.to_pybytes() + def __bytes__(self): + return self.as_py() + + def __getbuffer__(self, cp.Py_buffer* buffer, int flags): + buf = self.as_buffer() + if buf is None: + raise ValueError("Cannot export buffer from null Arrow Scalar") + 
cp.PyObject_GetBuffer(buf, buffer, flags) + cdef class LargeBinaryScalar(BinaryScalar): pass @@ -883,7 +937,7 @@ cdef class StringViewScalar(StringScalar): pass -cdef class ListScalar(Scalar): +cdef class ListScalar(Scalar, Sequence): """ Concrete class for list-like scalars. """ @@ -952,7 +1006,7 @@ cdef class LargeListViewScalar(ListScalar): pass -cdef class StructScalar(Scalar, collections.abc.Mapping): +cdef class StructScalar(Scalar, Mapping): """ Concrete class for struct scalars. """ @@ -1051,20 +1105,34 @@ cdef class StructScalar(Scalar, collections.abc.Mapping): return str(self._as_py_tuple()) -cdef class MapScalar(ListScalar): +cdef class MapScalar(ListScalar, Mapping): """ Concrete class for map scalars. """ def __getitem__(self, i): """ - Return the value at the given index. + Return the value at the given index or key. """ + arr = self.values if arr is None: - raise IndexError(i) + raise IndexError(i) if isinstance(i, int) else KeyError(i) + + key_field = self.type.key_field.name + item_field = self.type.item_field.name + + if isinstance(i, (bytes, str)): + try: + key_index = list(self.keys()).index(i) + except ValueError: + raise KeyError(i) + + dct = arr[_normalize_index(key_index, len(arr))] + return dct[item_field] + dct = arr[_normalize_index(i, len(arr))] - return (dct[self.type.key_field.name], dct[self.type.item_field.name]) + return (dct[key_field], dct[item_field]) def __iter__(self): """ @@ -1118,6 +1186,16 @@ cdef class MapScalar(ListScalar): result_dict[key] = value return result_dict + def keys(self): + """ + Return the keys of the map as a list. + """ + arr = self.values + if arr is None: + return [] + key_field = self.type.key_field.name + return [k.as_py() for k in arr.field(key_field)] + cdef class DictionaryScalar(Scalar): """ diff --git a/python/pyarrow/tests/test_scalars.py b/python/pyarrow/tests/test_scalars.py index 14f6ccef626..0f62dd98f82 100644 --- a/python/pyarrow/tests/test_scalars.py +++ b/python/pyarrow/tests/test_scalars.py @@ -19,6 +19,7 @@ import decimal import pytest import weakref +from collections.abc import Sequence, Mapping try: import numpy as np @@ -208,17 +209,26 @@ def test_timestamp_scalar(): def test_bool(): false = pa.scalar(False) true = pa.scalar(True) + null = pa.scalar(None, type=pa.bool_()) assert isinstance(false, pa.BooleanScalar) assert isinstance(true, pa.BooleanScalar) + assert isinstance(null, pa.BooleanScalar) assert repr(true) == "" assert str(true) == "True" assert repr(false) == "" assert str(false) == "False" + assert repr(null) == "" + assert str(null) == "None" assert true.as_py() is True assert false.as_py() is False + assert null.as_py() is None + + assert bool(true) is True + assert bool(false) is False + assert bool(null) is False def test_numerics(): @@ -228,6 +238,7 @@ def test_numerics(): assert repr(s) == "" assert str(s) == "1" assert s.as_py() == 1 + assert int(s) == 1 with pytest.raises(OverflowError): pa.scalar(-1, type='uint8') @@ -238,6 +249,8 @@ def test_numerics(): assert repr(s) == "" assert str(s) == "1.5" assert s.as_py() == 1.5 + assert float(s) == 1.5 + assert int(s) == 1 # float16 s = pa.scalar(0.5, type='float16') @@ -245,6 +258,8 @@ def test_numerics(): assert repr(s) == "" assert str(s) == "0.5" assert s.as_py() == 0.5 + assert float(s) == 0.5 + assert int(s) == 0 def test_decimal128(): @@ -540,7 +555,7 @@ def test_string(value, ty, scalar_typ): assert buf.to_pybytes() == value.encode() -@pytest.mark.parametrize('value', [b'foo', b'bar']) +@pytest.mark.parametrize('value', [b'foo', b'bar', 
b'', None]) @pytest.mark.parametrize(('ty', 'scalar_typ'), [ (pa.binary(), pa.BinaryScalar), (pa.large_binary(), pa.LargeBinaryScalar), @@ -556,14 +571,30 @@ def test_binary(value, ty, scalar_typ): assert s != b'xxxxx' buf = s.as_buffer() - assert isinstance(buf, pa.Buffer) - assert buf.to_pybytes() == value + + if value is None: + assert buf is None + with pytest.raises(ValueError): + memoryview(s) + else: + assert buf.to_pybytes() == value + assert isinstance(buf, pa.Buffer) + assert bytes(s) == value + + memview = memoryview(s) + assert memview.tobytes() == value + assert memview.format == 'b' + assert memview.itemsize == 1 + assert memview.ndim == 1 + assert memview.shape == (len(value),) + assert memview.strides == (1,) def test_fixed_size_binary(): s = pa.scalar(b'foof', type=pa.binary(4)) assert isinstance(s, pa.FixedSizeBinaryScalar) assert s.as_py() == b'foof' + assert bytes(s) == b'foof' with pytest.raises(pa.ArrowInvalid): pa.scalar(b'foof5', type=pa.binary(4)) @@ -593,6 +624,7 @@ def test_list(ty, klass): s[-3] with pytest.raises(IndexError): s[2] + assert isinstance(s, Sequence) @pytest.mark.numpy @@ -666,6 +698,7 @@ def test_struct(): v = {'x': 2, 'y': 3.5} s = pa.scalar(v, type=ty) assert list(s) == list(s.keys()) == ['x', 'y'] + assert list(s.values()) == [ pa.scalar(2, type=pa.int16()), pa.scalar(3.5, type=pa.float32()) @@ -687,6 +720,7 @@ def test_struct(): assert isinstance(s['y'], pa.FloatScalar) assert s['x'].as_py() == 2 assert s['y'].as_py() == 3.5 + assert isinstance(s, Mapping) with pytest.raises(KeyError): s['nonexistent'] @@ -698,10 +732,13 @@ def test_struct(): assert 'y' in s assert isinstance(s['x'], pa.Int16Scalar) assert isinstance(s['y'], pa.FloatScalar) + assert isinstance(s[0], pa.Int16Scalar) + assert isinstance(s[1], pa.FloatScalar) assert s['x'].is_valid is False assert s['y'].is_valid is False assert s['x'].as_py() is None assert s['y'].as_py() is None + assert isinstance(s, Mapping) def test_struct_duplicate_fields(): @@ -776,16 +813,21 @@ def test_map(pickle_module): ) assert s[-1] == s[1] assert s[-2] == s[0] + assert s['b'] == pa.scalar(2, type=pa.int8()) with pytest.raises(IndexError): s[-3] with pytest.raises(IndexError): s[2] + with pytest.raises(KeyError): + s['fake_key'] restored = pickle_module.loads(pickle_module.dumps(s)) assert restored.equals(s) assert s.as_py(maps_as_pydicts="strict") == {'a': 1, 'b': 2} + assert isinstance(s, Mapping) + def test_map_duplicate_fields(): ty = pa.map_(pa.string(), pa.int8()) From c040d50787462893a7615e31a22a0d1ad16399b2 Mon Sep 17 00:00:00 2001 From: shu-kitamura Date: Tue, 10 Jun 2025 21:52:02 +0900 Subject: [PATCH 37/63] GH-46606: [Python] Do not require numpy when normalizing slice (#46732) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Rationale for this change Slicing an array in non-trivial steps raises an exception when Numpy is not installed. #46606 ### What changes are included in this PR? I changed `np.arange(...)` to `list(range(...))` In `python/pyarrow/array.pxi` ### Are these changes tested? Yes ### Are there any user-facing changes? 
No * GitHub Issue: #46606 Authored-by: shu-kitamura Signed-off-by: Raúl Cumplido --- python/pyarrow/array.pxi | 4 +++- python/pyarrow/tests/test_array.py | 11 +++++------ 2 files changed, 8 insertions(+), 7 deletions(-) diff --git a/python/pyarrow/array.pxi b/python/pyarrow/array.pxi index 036c68486e5..fc412990511 100644 --- a/python/pyarrow/array.pxi +++ b/python/pyarrow/array.pxi @@ -584,7 +584,9 @@ def _normalize_slice(object arrow_obj, slice key): start, stop, step = key.indices(n) if step != 1: - indices = np.arange(start, stop, step) + indices = list(range(start, stop, step)) + if len(indices) == 0: + return arrow_obj.slice(0, 0) return arrow_obj.take(indices) else: length = max(stop - start, 0) diff --git a/python/pyarrow/tests/test_array.py b/python/pyarrow/tests/test_array.py index 7d72e953c85..7dabb8396b2 100644 --- a/python/pyarrow/tests/test_array.py +++ b/python/pyarrow/tests/test_array.py @@ -503,15 +503,14 @@ def test_array_slice(): assert res.to_numpy().tolist() == expected -@pytest.mark.numpy def test_array_slice_negative_step(): # ARROW-2714 - np_arr = np.arange(20) - arr = pa.array(np_arr) + values = list(range(20)) + arr = pa.array(values) chunked_arr = pa.chunked_array([arr]) cases = [ - slice(None, None, -1), + slice(None, None, -1), # GH-46606 slice(None, 6, -2), slice(10, 6, -2), slice(8, None, -2), @@ -525,7 +524,7 @@ def test_array_slice_negative_step(): for case in cases: result = arr[case] - expected = pa.array(np_arr[case]) + expected = pa.array(values[case], type=arr.type) assert result.equals(expected) result = pa.record_batch([arr], names=['f0'])[case] @@ -533,7 +532,7 @@ def test_array_slice_negative_step(): assert result.equals(expected) result = chunked_arr[case] - expected = pa.chunked_array([np_arr[case]]) + expected = pa.chunked_array([values[case]], type=arr.type) assert result.equals(expected) From fab0b706bcb26a3474064509bf238a029ab398eb Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Cumplido?= Date: Tue, 10 Jun 2025 22:39:10 +0200 Subject: [PATCH 38/63] GH-46516: [CI][Python] Force Cython>3.1.1 for docs builds (#46770) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Rationale for this change Due to a change of behaviour on Cython for 3.1 the docstrings generated fail when being tested with numpydoc. The issue was fixed on Cython==3.1.2. - https://github.com/cython/cython/issues/6904 ### What changes are included in this PR? - Pin Cython>3.1.1 for the docs requirements (both conda and pip) - Fix doctest failure that was introduced in the interim when the CI job had been failing. ### Are these changes tested? Via CI. ### Are there any user-facing changes? 
No * GitHub Issue: #46516 Authored-by: Raúl Cumplido Signed-off-by: Sutou Kouhei --- ci/conda_env_sphinx.txt | 1 + docs/requirements.txt | 1 + python/pyarrow/types.pxi | 2 +- 3 files changed, 3 insertions(+), 1 deletion(-) diff --git a/ci/conda_env_sphinx.txt b/ci/conda_env_sphinx.txt index 840577fdd97..1ab23918bb6 100644 --- a/ci/conda_env_sphinx.txt +++ b/ci/conda_env_sphinx.txt @@ -17,6 +17,7 @@ # Requirements for building the documentation breathe +cython>3.1.1 doxygen ipython linkify-it-py diff --git a/docs/requirements.txt b/docs/requirements.txt index 493528fb5c7..a97781bc97a 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -3,6 +3,7 @@ # breathe +cython>3.1.1 ipython linuxdoc myst-parser[linkify] diff --git a/python/pyarrow/types.pxi b/python/pyarrow/types.pxi index d771dc8b684..9b2f8881e37 100644 --- a/python/pyarrow/types.pxi +++ b/python/pyarrow/types.pxi @@ -4467,7 +4467,7 @@ def float16(): to a python list, the types of its elements will be ``np.float16`` >>> [type(val) for val in a.to_pylist()] - [, ] + [, ] """ return primitive_type(_Type_HALF_FLOAT) From f8f236f72e73807162aa884d77c7b843f80069c3 Mon Sep 17 00:00:00 2001 From: leopardracer <136604165+leopardracer@users.noreply.github.com> Date: Thu, 12 Jun 2025 03:42:06 +0300 Subject: [PATCH 39/63] MINOR: [Docs] Fix Typos in Documentation and Protobuf Comments (#46783) ### Rationale for this change There are some typos in documents and Protobuf comments. ### What changes are included in this PR? This pull request corrects several typographical errors in the documentation and protobuf comments. Specifically: - Fixed the spelling of "occuring" to "occurring" in `CDeviceDataInterface.rst`. - Fixed the spelling of "successfully" in `Flight.rst`. - Fixed the spelling of "continue" in `FlightSql.proto`. These changes improve the clarity and professionalism of the documentation and code comments. No functional code changes are included. ### Are these changes tested? No. ### Are there any user-facing changes? No. Authored-by: leopardracer <136604165+leopardracer@users.noreply.github.com> Signed-off-by: Sutou Kouhei --- docs/source/format/CDeviceDataInterface.rst | 2 +- docs/source/format/Flight.rst | 2 +- format/FlightSql.proto | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/docs/source/format/CDeviceDataInterface.rst b/docs/source/format/CDeviceDataInterface.rst index 19412c605c1..4bd9069d06f 100644 --- a/docs/source/format/CDeviceDataInterface.rst +++ b/docs/source/format/CDeviceDataInterface.rst @@ -848,7 +848,7 @@ This producer-provided structure has the following fields: If a non-zero value is returned from this, it should be followed only by the producer calling the ``on_error`` callback of the ``ArrowAsyncDeviceStreamHandler``. Because calling this method is likely to be separate from the current control flow, returning a non-zero value to signal - an error occuring allows the current thread to decide handle the case accordingly, while still + an error occurring allows the current thread to decide handle the case accordingly, while still allowing all error logging and handling to be centralized in the :c:member:`ArrowAsyncDeviceStreamHandler.on_error` callback. diff --git a/docs/source/format/Flight.rst b/docs/source/format/Flight.rst index 7355a698d06..3fa598114fe 100644 --- a/docs/source/format/Flight.rst +++ b/docs/source/format/Flight.rst @@ -409,7 +409,7 @@ data is returned in by respecting the ``Accept`` header in the request. 
If multiple formats are requested and supported, the choice of which to use is server-specific. If none of the requested content-types are supported, the server may respond with either 406 (Not Acceptable), -415 (Unsupported Media Type), or successfuly respond with a different +415 (Unsupported Media Type), or successfully respond with a different format that it does support, along with the correct ``Content-Type`` header. diff --git a/format/FlightSql.proto b/format/FlightSql.proto index 25dc1318743..2877a8dad18 100644 --- a/format/FlightSql.proto +++ b/format/FlightSql.proto @@ -1862,7 +1862,7 @@ message DoPutPreparedStatementResult { // statement must use this new handle. // The updated handle allows implementing query parameters with stateless services. // - // When an updated handle is not provided by the server, clients should contiue + // When an updated handle is not provided by the server, clients should continue // using the previous handle provided by `ActionCreatePreparedStatementResonse`. optional bytes prepared_statement_handle = 1; } From 2e75f296a58adbe778987d30294be7020421663b Mon Sep 17 00:00:00 2001 From: Sutou Kouhei Date: Thu, 12 Jun 2025 13:57:09 +0900 Subject: [PATCH 40/63] GH-46764: [C++][Gandiva] Fix wrong `.bc` depends (#46765) ### Rationale for this change The `SOURCE_FILE` variable doesn't exist in this context. ### What changes are included in this PR? Fix a typo. ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46764 Authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- cpp/cmake_modules/GandivaAddBitcode.cmake | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/cpp/cmake_modules/GandivaAddBitcode.cmake b/cpp/cmake_modules/GandivaAddBitcode.cmake index 98847f8a186..6b5e5b3e60c 100644 --- a/cpp/cmake_modules/GandivaAddBitcode.cmake +++ b/cpp/cmake_modules/GandivaAddBitcode.cmake @@ -71,5 +71,5 @@ function(gandiva_add_bitcode SOURCE) endif() add_custom_command(OUTPUT ${BC_FILE} COMMAND ${PRECOMPILE_COMMAND} - DEPENDS ${SOURCE_FILE}) + DEPENDS ${SOURCE}) endfunction() From ae69dbaf82e7ef3a43c2145e3f5e4a828b8c2184 Mon Sep 17 00:00:00 2001 From: Hiroyuki Sato Date: Thu, 12 Jun 2025 14:00:31 +0900 Subject: [PATCH 41/63] GH-46763: [CI][Dev] fix shellcheck errors in the ci/scripts/ccache_setup.sh (#46766) ### Rationale for this change This is the sub issue #44748. * SC2086: (info): Double quote to prevent globbing and word splitting. * SC2129: Consider using { cmd1; cmd2; } >> file instead of individual redirects. ``` shellcheck ci/scripts/ccache_setup.sh In ci/scripts/ccache_setup.sh line 22: echo "ARROW_USE_CCACHE=ON" >> $GITHUB_ENV ^-- SC2129 (style): Consider using { cmd1; cmd2; } >> file instead of individual redirects. ^---------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: echo "ARROW_USE_CCACHE=ON" >> "$GITHUB_ENV" In ci/scripts/ccache_setup.sh line 23: echo "CCACHE_COMPILERCHECK=content" >> $GITHUB_ENV ^---------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: echo "CCACHE_COMPILERCHECK=content" >> "$GITHUB_ENV" In ci/scripts/ccache_setup.sh line 24: echo "CCACHE_COMPRESS=1" >> $GITHUB_ENV ^---------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: echo "CCACHE_COMPRESS=1" >> "$GITHUB_ENV" In ci/scripts/ccache_setup.sh line 25: echo "CCACHE_COMPRESSLEVEL=6" >> $GITHUB_ENV ^---------^ SC2086 (info): Double quote to prevent globbing and word splitting. 
Did you mean: echo "CCACHE_COMPRESSLEVEL=6" >> "$GITHUB_ENV" In ci/scripts/ccache_setup.sh line 26: echo "CCACHE_MAXSIZE=1G" >> $GITHUB_ENV ^---------^ SC2086 (info): Double quote to prevent globbing and word splitting. Did you mean: echo "CCACHE_MAXSIZE=1G" >> "$GITHUB_ENV" For more information: https://www.shellcheck.net/wiki/SC2086 -- Double quote to prevent globbing ... https://www.shellcheck.net/wiki/SC2129 -- Consider using { cmd1; cmd2; } >>... palolovalley:arrow hsato$ vi ci/scripts/ccache_setup.sh ``` ### What changes are included in this PR? * SC2086: Quoting like "$GITHUB_ENV" * SC2129: combine multiple commands using `{}` ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46763 Authored-by: Hiroyuki Sato Signed-off-by: Sutou Kouhei --- .github/workflows/cpp.yml | 2 ++ .github/workflows/cpp_extra.yml | 2 ++ .github/workflows/ruby.yml | 2 ++ .pre-commit-config.yaml | 1 + ci/scripts/ccache_setup.sh | 12 +++++++----- 5 files changed, 14 insertions(+), 5 deletions(-) diff --git a/.github/workflows/cpp.yml b/.github/workflows/cpp.yml index 70332773302..b29b55d5586 100644 --- a/.github/workflows/cpp.yml +++ b/.github/workflows/cpp.yml @@ -29,6 +29,7 @@ on: - '.github/workflows/cpp.yml' - 'ci/conda_env_*' - 'ci/docker/**' + - 'ci/scripts/ccache_setup.sh' - 'ci/scripts/cpp_*' - 'ci/scripts/install_azurite.sh' - 'ci/scripts/install_gcs_testbench.sh' @@ -45,6 +46,7 @@ on: - '.github/workflows/cpp.yml' - 'ci/conda_env_*' - 'ci/docker/**' + - 'ci/scripts/ccache_setup.sh' - 'ci/scripts/cpp_*' - 'ci/scripts/install_azurite.sh' - 'ci/scripts/install_gcs_testbench.sh' diff --git a/.github/workflows/cpp_extra.yml b/.github/workflows/cpp_extra.yml index 3df63568e9f..e982afde91d 100644 --- a/.github/workflows/cpp_extra.yml +++ b/.github/workflows/cpp_extra.yml @@ -27,6 +27,7 @@ on: - '.github/workflows/cpp_extra.yml' - 'ci/conda_env_*' - 'ci/docker/**' + - 'ci/scripts/ccache_setup.sh' - 'ci/scripts/cpp_*' - 'ci/scripts/install_azurite.sh' - 'ci/scripts/install_gcs_testbench.sh' @@ -45,6 +46,7 @@ on: - '.github/workflows/cpp_extra.yml' - 'ci/conda_env_*' - 'ci/docker/**' + - 'ci/scripts/ccache_setup.sh' - 'ci/scripts/cpp_*' - 'ci/scripts/install_azurite.sh' - 'ci/scripts/install_gcs_testbench.sh' diff --git a/.github/workflows/ruby.yml b/.github/workflows/ruby.yml index 8cb16049f0d..af52e733204 100644 --- a/.github/workflows/ruby.yml +++ b/.github/workflows/ruby.yml @@ -29,6 +29,7 @@ on: - '.github/workflows/ruby.yml' - 'ci/docker/**' - 'ci/scripts/c_glib_*' + - 'ci/scripts/ccache_setup.sh' - 'ci/scripts/cpp_*' - 'ci/scripts/msys2_*' - 'ci/scripts/ruby_*' @@ -43,6 +44,7 @@ on: - '.github/workflows/ruby.yml' - 'ci/docker/**' - 'ci/scripts/c_glib_*' + - 'ci/scripts/ccache_setup.sh' - 'ci/scripts/cpp_*' - 'ci/scripts/msys2_*' - 'ci/scripts/ruby_*' diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 2d6d24c416b..c3835ac0f1b 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -299,6 +299,7 @@ repos: ?^c_glib/test/run-test\.sh$| ?^ci/scripts/c_glib_build\.sh$| ?^ci/scripts/c_glib_test\.sh$| + ?^ci/scripts/ccache_setup\.sh$| ?^ci/scripts/conan_build\.sh$| ?^ci/scripts/conan_setup\.sh$| ?^ci/scripts/cpp_test\.sh$| diff --git a/ci/scripts/ccache_setup.sh b/ci/scripts/ccache_setup.sh index 6afcdda7d0a..df00efe702f 100755 --- a/ci/scripts/ccache_setup.sh +++ b/ci/scripts/ccache_setup.sh @@ -19,8 +19,10 @@ set -eux -echo "ARROW_USE_CCACHE=ON" >> $GITHUB_ENV -echo "CCACHE_COMPILERCHECK=content" >> $GITHUB_ENV -echo 
"CCACHE_COMPRESS=1" >> $GITHUB_ENV -echo "CCACHE_COMPRESSLEVEL=6" >> $GITHUB_ENV -echo "CCACHE_MAXSIZE=1G" >> $GITHUB_ENV +{ + echo "ARROW_USE_CCACHE=ON" + echo "CCACHE_COMPILERCHECK=content" + echo "CCACHE_COMPRESS=1" + echo "CCACHE_COMPRESSLEVEL=6" + echo "CCACHE_MAXSIZE=1G" +} >> "$GITHUB_ENV" From dc4defe9e03be55f4b3bb80849a9e3258513b8a2 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Thu, 12 Jun 2025 09:59:53 +0200 Subject: [PATCH 42/63] GH-45978: [C++] Bump bundled mimalloc version (#45979) ### What changes are included in this PR? Update bundled mimalloc version from 2.0.6 to 2.2.4. This will also help fix the CMake 4.0 compatibility issues. ### Are these changes tested? Yes, by CI builds. ### Are there any user-facing changes? No. * GitHub Issue: #45978 Authored-by: Antoine Pitrou Signed-off-by: Antoine Pitrou --- cpp/cmake_modules/ThirdpartyToolchain.cmake | 14 +++++++++----- cpp/thirdparty/versions.txt | 4 ++-- 2 files changed, 11 insertions(+), 7 deletions(-) diff --git a/cpp/cmake_modules/ThirdpartyToolchain.cmake b/cpp/cmake_modules/ThirdpartyToolchain.cmake index 00263849494..412207c4bfd 100644 --- a/cpp/cmake_modules/ThirdpartyToolchain.cmake +++ b/cpp/cmake_modules/ThirdpartyToolchain.cmake @@ -2275,21 +2275,25 @@ if(ARROW_MIMALLOC) # We only use a vendored mimalloc as we want to control its build options. set(MIMALLOC_LIB_BASE_NAME "mimalloc") - if(WIN32) - set(MIMALLOC_LIB_BASE_NAME "${MIMALLOC_LIB_BASE_NAME}-static") - endif() if(${UPPERCASE_BUILD_TYPE} STREQUAL "DEBUG") set(MIMALLOC_LIB_BASE_NAME "${MIMALLOC_LIB_BASE_NAME}-${LOWERCASE_BUILD_TYPE}") endif() set(MIMALLOC_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/mimalloc_ep/src/mimalloc_ep") - set(MIMALLOC_INCLUDE_DIR "${MIMALLOC_PREFIX}/include/mimalloc-2.0") + set(MIMALLOC_INCLUDE_DIR "${MIMALLOC_PREFIX}/include/mimalloc-2.2") set(MIMALLOC_STATIC_LIB - "${MIMALLOC_PREFIX}/lib/mimalloc-2.0/${CMAKE_STATIC_LIBRARY_PREFIX}${MIMALLOC_LIB_BASE_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" + "${MIMALLOC_PREFIX}/lib/mimalloc-2.2/${CMAKE_STATIC_LIBRARY_PREFIX}${MIMALLOC_LIB_BASE_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}" ) + set(MIMALLOC_C_FLAGS ${EP_C_FLAGS}) + if(MINGW) + # Workaround https://github.com/microsoft/mimalloc/issues/910 on RTools40 + set(MIMALLOC_C_FLAGS "${MIMALLOC_C_FLAGS} -DERROR_COMMITMENT_MINIMUM=635") + endif() + set(MIMALLOC_CMAKE_ARGS ${EP_COMMON_CMAKE_ARGS} + "-DCMAKE_C_FLAGS=${MIMALLOC_C_FLAGS}" "-DCMAKE_INSTALL_PREFIX=${MIMALLOC_PREFIX}" -DMI_OVERRIDE=OFF -DMI_LOCAL_DYNAMIC_TLS=ON diff --git a/cpp/thirdparty/versions.txt b/cpp/thirdparty/versions.txt index 3a8ad73e137..1fc53c4d5e6 100644 --- a/cpp/thirdparty/versions.txt +++ b/cpp/thirdparty/versions.txt @@ -82,8 +82,8 @@ ARROW_JEMALLOC_BUILD_VERSION=5.3.0 ARROW_JEMALLOC_BUILD_SHA256_CHECKSUM=2db82d1e7119df3e71b7640219b6dfe84789bc0537983c3b7ac4f7189aecfeaa ARROW_LZ4_BUILD_VERSION=v1.10.0 ARROW_LZ4_BUILD_SHA256_CHECKSUM=537512904744b35e232912055ccf8ec66d768639ff3abe5788d90d792ec5f48b -ARROW_MIMALLOC_BUILD_VERSION=v2.0.6 -ARROW_MIMALLOC_BUILD_SHA256_CHECKSUM=9f05c94cc2b017ed13698834ac2a3567b6339a8bde27640df5a1581d49d05ce5 +ARROW_MIMALLOC_BUILD_VERSION=v2.2.4 +ARROW_MIMALLOC_BUILD_SHA256_CHECKSUM=754a98de5e2912fddbeaf24830f982b4540992f1bab4a0a8796ee118e0752bda ARROW_NLOHMANN_JSON_BUILD_VERSION=v3.12.0 ARROW_NLOHMANN_JSON_BUILD_SHA256_CHECKSUM=4b92eb0c06d10683f7447ce9406cb97cd4b453be18d7279320f7b2f025c10187 ARROW_OPENTELEMETRY_BUILD_VERSION=v1.21.0 From a15264ca5daa0796c86ca634ca3bc90a03fa3a87 Mon Sep 17 00:00:00 2001 From: Antoine Pitrou Date: Thu, 12 Jun 2025 10:03:43 
+0200 Subject: [PATCH 43/63] GH-46710: [C++] Fix ownership and lifetime issues in Dataset Writer (#46711) ### Rationale for this change The dataset writer currently uses raw pointers of the pieces of state it needs to track, even though some of this state is accessed in async callbacks that might run at arbitrary points in time (especially in the case an error forces an early return of the synchronously running logic). ### What changes are included in this PR? This PR strives to strengthen the dataset writer code by using safe pointers everywhere possible, and also trying to guard against potential leaks due to cyclic shared_ptr references. Besides making it potentially more robust, it should also make future maintenance and evolutions easier. This PR seems to fix #45235, though it will have to be confirmed after multiple CI runs. ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46710 Authored-by: Antoine Pitrou Signed-off-by: Antoine Pitrou --- cpp/src/arrow/dataset/dataset_writer.cc | 156 +++++++++++++++--------- cpp/src/arrow/util/async_util.cc | 24 ++-- 2 files changed, 109 insertions(+), 71 deletions(-) diff --git a/cpp/src/arrow/dataset/dataset_writer.cc b/cpp/src/arrow/dataset/dataset_writer.cc index 43895374aa7..7845f488219 100644 --- a/cpp/src/arrow/dataset/dataset_writer.cc +++ b/cpp/src/arrow/dataset/dataset_writer.cc @@ -131,28 +131,38 @@ Result> OpenWriter( {write_options.filesystem, filename}); } -class DatasetWriterFileQueue { +class DatasetWriterFileQueue + : public std::enable_shared_from_this { public: explicit DatasetWriterFileQueue(const std::shared_ptr& schema, const FileSystemDatasetWriteOptions& options, - DatasetWriterState* writer_state) - : options_(options), schema_(schema), writer_state_(writer_state) {} + std::shared_ptr writer_state) + : options_(options), schema_(schema), writer_state_(std::move(writer_state)) {} - void Start(util::AsyncTaskScheduler* file_tasks, const std::string& filename) { - file_tasks_ = file_tasks; + void Start(std::unique_ptr file_tasks, + std::string filename) { + file_tasks_ = std::move(file_tasks); // Because the scheduler runs one task at a time we know the writer will // be opened before any attempt to write file_tasks_->AddSimpleTask( - [this, filename] { - Executor* io_executor = options_.filesystem->io_context().executor(); - return DeferNotOk(io_executor->Submit([this, filename]() { - ARROW_ASSIGN_OR_RAISE(writer_, OpenWriter(options_, schema_, filename)); + [self = shared_from_this(), filename = std::move(filename)] { + Executor* io_executor = self->options_.filesystem->io_context().executor(); + return DeferNotOk(io_executor->Submit([self, filename = std::move(filename)]() { + ARROW_ASSIGN_OR_RAISE(self->writer_, + OpenWriter(self->options_, self->schema_, filename)); return Status::OK(); })); }, "DatasetWriter::OpenWriter"sv); } + void Abort() { + // The scheduler may be keeping this object alive through shared_ptr references + // in async closures. Make sure we break any reference cycles by losing our + // reference to the scheduler. 
+ file_tasks_.reset(); + } + Result> PopStagedBatch() { std::vector> batches_to_write; uint64_t num_rows = 0; @@ -184,7 +194,7 @@ class DatasetWriterFileQueue { void ScheduleBatch(std::shared_ptr batch) { file_tasks_->AddSimpleTask( - [self = this, batch = std::move(batch)]() { + [self = shared_from_this(), batch = std::move(batch)]() { return self->WriteNext(std::move(batch)); }, "DatasetWriter::WriteBatch"sv); @@ -217,13 +227,18 @@ class DatasetWriterFileQueue { Status Finish() { writer_state_->staged_rows_count -= rows_currently_staged_; while (!staged_batches_.empty()) { - RETURN_NOT_OK(PopAndDeliverStagedBatch()); + auto st = PopAndDeliverStagedBatch().status(); + if (!st.ok()) { + file_tasks_.reset(); + return st; + } } // At this point all write tasks have been added. Because the scheduler // is a 1-task FIFO we know this task will run at the very end and can // add it now. - file_tasks_->AddSimpleTask([this] { return DoFinish(); }, + file_tasks_->AddSimpleTask([self = shared_from_this()] { return self->DoFinish(); }, "DatasetWriter::FinishFile"sv); + file_tasks_.reset(); return Status::OK(); } @@ -231,7 +246,7 @@ class DatasetWriterFileQueue { Future<> WriteNext(std::shared_ptr next) { // May want to prototype / measure someday pushing the async write down further return DeferNotOk(options_.filesystem->io_context().executor()->Submit( - [self = this, batch = std::move(next)]() { + [self = shared_from_this(), batch = std::move(next)]() { int64_t rows_to_release = batch->num_rows(); Status status = self->writer_->Write(batch); self->writer_state_->rows_in_flight_throttle.Release(rows_to_release); @@ -244,21 +259,22 @@ class DatasetWriterFileQueue { std::lock_guard lg(writer_state_->visitors_mutex); RETURN_NOT_OK(options_.writer_pre_finish(writer_.get())); } - return writer_->Finish().Then([this]() { - std::lock_guard lg(writer_state_->visitors_mutex); - return options_.writer_post_finish(writer_.get()); - }); + return writer_->Finish().Then( + [self = shared_from_this(), writer_post_finish = options_.writer_post_finish]() { + std::lock_guard lg(self->writer_state_->visitors_mutex); + return writer_post_finish(self->writer_.get()); + }); } const FileSystemDatasetWriteOptions& options_; const std::shared_ptr& schema_; - DatasetWriterState* writer_state_; + std::shared_ptr writer_state_; std::shared_ptr writer_; // Batches are accumulated here until they are large enough to write out at which // point they are merged together and added to write_queue_ std::deque> staged_batches_; uint64_t rows_currently_staged_ = 0; - util::AsyncTaskScheduler* file_tasks_ = nullptr; + std::unique_ptr file_tasks_; }; struct WriteTask { @@ -266,18 +282,25 @@ struct WriteTask { uint64_t num_rows; }; -class DatasetWriterDirectoryQueue { +class DatasetWriterDirectoryQueue + : public std::enable_shared_from_this { public: DatasetWriterDirectoryQueue(util::AsyncTaskScheduler* scheduler, std::string directory, std::string prefix, std::shared_ptr schema, const FileSystemDatasetWriteOptions& write_options, - DatasetWriterState* writer_state) + std::shared_ptr writer_state) : scheduler_(std::move(scheduler)), directory_(std::move(directory)), prefix_(std::move(prefix)), schema_(std::move(schema)), write_options_(write_options), - writer_state_(writer_state) {} + writer_state_(std::move(writer_state)) {} + + ~DatasetWriterDirectoryQueue() { + if (latest_open_file_) { + latest_open_file_->Abort(); + } + } Result> NextWritableChunk( std::shared_ptr batch, std::shared_ptr* remainder, @@ -330,32 +353,27 @@ class 
DatasetWriterDirectoryQueue { Status FinishCurrentFile() { if (latest_open_file_) { - ARROW_RETURN_NOT_OK(latest_open_file_->Finish()); - latest_open_file_tasks_.reset(); - latest_open_file_ = nullptr; + auto file = std::move(latest_open_file_); + ARROW_RETURN_NOT_OK(file->Finish()); } rows_written_ = 0; return GetNextFilename().Value(¤t_filename_); } Status OpenFileQueue(const std::string& filename) { - auto file_queue = - std::make_unique(schema_, write_options_, writer_state_); - latest_open_file_ = file_queue.get(); - // Create a dedicated throttle for write jobs to this file and keep it alive until we - // are finished and have closed the file. - auto file_finish_task = [this, file_queue = std::move(file_queue)] { - writer_state_->open_files_throttle.Release(1); + latest_open_file_.reset( + new DatasetWriterFileQueue(schema_, write_options_, writer_state_)); + auto file_finish_task = [self = shared_from_this()] { + self->writer_state_->open_files_throttle.Release(1); return Status::OK(); }; - latest_open_file_tasks_ = util::MakeThrottledAsyncTaskGroup( - scheduler_, 1, /*queue=*/nullptr, std::move(file_finish_task)); + auto file_tasks = util::MakeThrottledAsyncTaskGroup(scheduler_, 1, /*queue=*/nullptr, + std::move(file_finish_task)); if (init_future_.is_valid()) { - latest_open_file_tasks_->AddSimpleTask( - [init_future = init_future_]() { return init_future; }, - "DatasetWriter::WaitForDirectoryInit"sv); + file_tasks->AddSimpleTask([init_future = init_future_]() { return init_future; }, + "DatasetWriter::WaitForDirectoryInit"sv); } - latest_open_file_->Start(latest_open_file_tasks_.get(), filename); + latest_open_file_->Start(std::move(file_tasks), filename); return Status::OK(); } @@ -398,14 +416,14 @@ class DatasetWriterDirectoryQueue { "DatasetWriter::InitializeDirectory"sv); } - static Result> Make( + static Result> Make( util::AsyncTaskScheduler* scheduler, const FileSystemDatasetWriteOptions& write_options, - DatasetWriterState* writer_state, std::shared_ptr schema, + std::shared_ptr writer_state, std::shared_ptr schema, std::string directory, std::string prefix) { - auto dir_queue = std::make_unique( + auto dir_queue = std::make_shared( scheduler, std::move(directory), std::move(prefix), std::move(schema), - write_options, writer_state); + write_options, std::move(writer_state)); dir_queue->PrepareDirectory(); ARROW_ASSIGN_OR_RAISE(dir_queue->current_filename_, dir_queue->GetNextFilename()); return dir_queue; @@ -413,26 +431,31 @@ class DatasetWriterDirectoryQueue { Status Finish() { if (latest_open_file_) { - ARROW_RETURN_NOT_OK(latest_open_file_->Finish()); - latest_open_file_tasks_.reset(); - latest_open_file_ = nullptr; + auto file = std::move(latest_open_file_); + ARROW_RETURN_NOT_OK(file->Finish()); } used_filenames_.clear(); return Status::OK(); } + void Abort() { + if (latest_open_file_) { + latest_open_file_->Abort(); + latest_open_file_.reset(); + } + } + private: util::AsyncTaskScheduler* scheduler_ = nullptr; std::string directory_; std::string prefix_; std::shared_ptr schema_; const FileSystemDatasetWriteOptions& write_options_; - DatasetWriterState* writer_state_; + std::shared_ptr writer_state_; Future<> init_future_; std::string current_filename_; std::unordered_set used_filenames_; - DatasetWriterFileQueue* latest_open_file_ = nullptr; - std::unique_ptr latest_open_file_tasks_; + std::shared_ptr latest_open_file_; uint64_t rows_written_ = 0; uint32_t file_counter_ = 0; }; @@ -520,11 +543,26 @@ class DatasetWriter::DatasetWriterImpl { return Status::OK(); })), 
write_options_(std::move(write_options)), - writer_state_(max_rows_queued, write_options_.max_open_files, - CalculateMaxRowsStaged(max_rows_queued)), + writer_state_(std::make_shared( + max_rows_queued, write_options_.max_open_files, + CalculateMaxRowsStaged(max_rows_queued))), pause_callback_(std::move(pause_callback)), resume_callback_(std::move(resume_callback)) {} + ~DatasetWriterImpl() { + // In case something went wrong (e.g. an IO error occurred), some tasks + // may be left dangling in a ThrottledAsyncTaskScheduler and that may + // lead to memory leaks via shared_ptr reference cycles (this can show up + // in some unit tests under Valgrind). + // To prevent this, explicitly break reference cycles at DatasetWriter + // destruction. + // The alternative is to use weak_from_this() thoroughly in async callbacks, + // but that makes for less readable code. + for (const auto& directory_queue : directory_queues_) { + directory_queue.second->Abort(); + } + } + Future<> WriteAndCheckBackpressure(std::shared_ptr batch, const std::string& directory, const std::string& prefix) { @@ -592,8 +630,10 @@ class DatasetWriter::DatasetWriterImpl { "DatasetWriter::FinishAll"sv); // Reset write_tasks_ to signal that we are done adding tasks, this will allow // us to invoke the finish callback once the tasks wrap up. - std::lock_guard lg(mutex_); - write_tasks_.reset(); + { + std::lock_guard lg(mutex_); + write_tasks_.reset(); + } } protected: @@ -621,7 +661,7 @@ class DatasetWriter::DatasetWriterImpl { &directory_queues_, directory + prefix, [this, &batch, &directory, &prefix](const std::string& key) { return DatasetWriterDirectoryQueue::Make(scheduler_, write_options_, - &writer_state_, batch->schema(), + writer_state_, batch->schema(), directory, prefix); })); std::shared_ptr dir_queue = dir_queue_itr->second; @@ -643,16 +683,16 @@ class DatasetWriter::DatasetWriterImpl { continue; } backpressure = - writer_state_.rows_in_flight_throttle.Acquire(next_chunk->num_rows()); + writer_state_->rows_in_flight_throttle.Acquire(next_chunk->num_rows()); if (!backpressure.is_finished()) { EVENT_ON_CURRENT_SPAN("DatasetWriter::Backpressure::TooManyRowsQueued"); break; } if (will_open_file) { - backpressure = writer_state_.open_files_throttle.Acquire(1); + backpressure = writer_state_->open_files_throttle.Acquire(1); if (!backpressure.is_finished()) { EVENT_ON_CURRENT_SPAN("DatasetWriter::Backpressure::TooManyOpenFiles"); - writer_state_.rows_in_flight_throttle.Release(next_chunk->num_rows()); + writer_state_->rows_in_flight_throttle.Release(next_chunk->num_rows()); RETURN_NOT_OK(TryCloseLargestFile()); break; } @@ -664,7 +704,7 @@ class DatasetWriter::DatasetWriterImpl { // // `open_files_throttle` will be handed by `DatasetWriterDirectoryQueue` // so we don't need to release it here. 
-      writer_state_.rows_in_flight_throttle.Release(next_chunk->num_rows());
+      writer_state_->rows_in_flight_throttle.Release(next_chunk->num_rows());
       return s;
     }
     batch = std::move(remainder);
@@ -685,7 +725,7 @@ class DatasetWriter::DatasetWriterImpl {
   std::unique_ptr write_tasks_;
   Future<> finish_fut_ = Future<>::Make();
   FileSystemDatasetWriteOptions write_options_;
-  DatasetWriterState writer_state_;
+  std::shared_ptr writer_state_;
   std::function pause_callback_;
   std::function resume_callback_;
   // Map from directory + prefix to the queue for that directory
diff --git a/cpp/src/arrow/util/async_util.cc b/cpp/src/arrow/util/async_util.cc
index 46825c35da0..f8b979a3f56 100644
--- a/cpp/src/arrow/util/async_util.cc
+++ b/cpp/src/arrow/util/async_util.cc
@@ -316,15 +316,11 @@ class ThrottledAsyncTaskSchedulerImpl
 #endif
     queue_->Push(std::move(task));
     lk.unlock();
-    maybe_backoff->AddCallback(
-        [weak_self = std::weak_ptr(
-             shared_from_this())](const Status& st) {
-          if (st.ok()) {
-            if (auto self = weak_self.lock()) {
-              self->ContinueTasks();
-            }
-          }
-        });
+    maybe_backoff->AddCallback([weak_self = weak_from_this()](const Status& st) {
+      if (auto self = weak_self.lock(); self && st.ok()) {
+        self->ContinueTasks();
+      }
+    });
     return true;
   } else {
     lk.unlock();
@@ -350,8 +346,9 @@ class ThrottledAsyncTaskSchedulerImpl
        self = shared_from_this()]() mutable -> Result> {
     ARROW_ASSIGN_OR_RAISE(Future<> inner_fut, (*inner_task)());
     if (!inner_fut.TryAddCallback([&] {
-          return [latched_cost, self = std::move(self)](const Status& st) -> void {
-            if (st.ok()) {
+          return [latched_cost,
+                  weak_self = self->weak_from_this()](const Status& st) -> void {
+            if (auto self = weak_self.lock(); self && st.ok()) {
               self->throttle_->Release(latched_cost);
               self->ContinueTasks();
             }
@@ -360,6 +357,7 @@ class ThrottledAsyncTaskSchedulerImpl
           // If the task is already finished then don't run ContinueTasks
           // if we are already running it so we can avoid stack overflow
           self->throttle_->Release(latched_cost);
+          inner_task.reset();
           if (!in_continue) {
             self->ContinueTasks();
           }
@@ -377,8 +375,8 @@ class ThrottledAsyncTaskSchedulerImpl
     if (maybe_backoff) {
       lk.unlock();
       if (!maybe_backoff->TryAddCallback([&] {
-            return [self = shared_from_this()](const Status& st) {
-              if (st.ok()) {
+            return [weak_self = weak_from_this()](const Status& st) {
+              if (auto self = weak_self.lock(); self && st.ok()) {
                 self->ContinueTasks();
               }
             };

From 28f7e7987601394d8f17bc065ecb2ac9edce2a53 Mon Sep 17 00:00:00 2001
From: Sutou Kouhei
Date: Thu, 12 Jun 2025 21:47:52 +0900
Subject: [PATCH 44/63] GH-46785: [CI][Dev][C++] Suppress needless outputs of cpplint with pre-commit (#46786)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Rationale for this change

If cpplint outputs "Done ..." for each file, it's difficult to find lint errors.

### What changes are included in this PR?

Suppress "Done ..." messages.

### Are these changes tested?

Yes.

### Are there any user-facing changes?

No.
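For illustration, the difference can be reproduced by invoking cpplint directly (a rough sketch; the source file path below is hypothetical, and `--verbose=2` mirrors the argument the hook already passes):

```
# Without --quiet, cpplint prints a "Done processing <file>" line for every
# checked file, so real diagnostics get buried in long runs:
cpplint --verbose=2 cpp/src/arrow/example.cc

# With --quiet, only diagnostics are printed, and nothing at all when the
# file is clean:
cpplint --quiet --verbose=2 cpp/src/arrow/example.cc
```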
* GitHub Issue: #46785

Authored-by: Sutou Kouhei
Signed-off-by: Raúl Cumplido
---
 .pre-commit-config.yaml | 1 +
 1 file changed, 1 insertion(+)

diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index c3835ac0f1b..8ea80794ebc 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -77,6 +77,7 @@ repos:
         alias: cpp
         name: C++ Lint
         args:
+          - "--quiet"
           - "--verbose=2"
         types_or:
           - c++

From 8fa54bcd74183c2ce932c3165402d484a51f0a1b Mon Sep 17 00:00:00 2001
From: Sutou Kouhei
Date: Fri, 13 Jun 2025 09:21:36 +0900
Subject: [PATCH 45/63] GH-46787: [CI][Integration] Use Node.js 20 (#46790)

### Rationale for this change

Node.js 18 reached EOL on 2025-04-30:
https://github.com/nodejs/release#release-schedule

### What changes are included in this PR?

Use Node.js 20.

### Are these changes tested?

Yes.

### Are there any user-facing changes?

No.

* GitHub Issue: #46787

Authored-by: Sutou Kouhei
Signed-off-by: Sutou Kouhei
---
 ci/docker/conda-integration.dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/ci/docker/conda-integration.dockerfile b/ci/docker/conda-integration.dockerfile
index 7841f942010..9101446da66 100644
--- a/ci/docker/conda-integration.dockerfile
+++ b/ci/docker/conda-integration.dockerfile
@@ -21,7 +21,7 @@ FROM ${repo}:${arch}-conda-cpp

 ARG arch=amd64
 ARG maven=3.8.7
-ARG node=16
+ARG node=20
 ARG yarn=1.22
 ARG jdk=11

From fe0dc8e8b1309bc55a6b7f431524b4ebad07ae97 Mon Sep 17 00:00:00 2001
From: DenisTarasyuk <131180287+DenisTarasyuk@users.noreply.github.com>
Date: Fri, 13 Jun 2025 05:17:02 +0300
Subject: [PATCH 46/63] GH-46708: [C++][Gandiva] Added zero return values for castDECIMAL_utf8 (#46709)

### Rationale for this change

castDECIMAL_utf8 has undefined behavior if the input value cannot be parsed, which causes a SIGSEGV for some expressions in the Projector.

### What changes are included in this PR?

Set out_high and out_low to 0 in castDECIMAL_utf8 so that those output values are initialized when the input value cannot be parsed. Added a corresponding test that reproduces the SIGSEGV in the projector.

### Are these changes tested?

Yes.

### Are there any user-facing changes?
No * GitHub Issue: #46708 Authored-by: DenisTarasyuk Signed-off-by: Sutou Kouhei --- .../gandiva/precompiled/decimal_wrapper.cc | 2 + cpp/src/gandiva/tests/decimal_test.cc | 41 +++++++++++++++++++ 2 files changed, 43 insertions(+) diff --git a/cpp/src/gandiva/precompiled/decimal_wrapper.cc b/cpp/src/gandiva/precompiled/decimal_wrapper.cc index 082d5832d14..cffb7ae9781 100644 --- a/cpp/src/gandiva/precompiled/decimal_wrapper.cc +++ b/cpp/src/gandiva/precompiled/decimal_wrapper.cc @@ -406,6 +406,8 @@ void castDECIMAL_utf8(int64_t context, const char* in, int32_t in_length, gdv_fn_dec_from_string(context, in, in_length, &precision_from_str, &scale_from_str, &dec_high_from_str, &dec_low_from_str); if (status != 0) { + *out_high = 0; + *out_low = 0; return; } diff --git a/cpp/src/gandiva/tests/decimal_test.cc b/cpp/src/gandiva/tests/decimal_test.cc index 0f2266c2cff..f8d049fd805 100644 --- a/cpp/src/gandiva/tests/decimal_test.cc +++ b/cpp/src/gandiva/tests/decimal_test.cc @@ -17,6 +17,7 @@ #include +#include #include #include "arrow/memory_pool.h" #include "arrow/status.h" @@ -1237,4 +1238,44 @@ TEST_F(TestDecimal, TestSha) { EXPECT_NE(value_at_position, response->GetScalar(i - 1).ValueOrDie()->ToString()); } } + +TEST_F(TestDecimal, TestCastDecimalVarCharInvalidInputInvalidOutput) { + auto decimal_type_10_0 = std::make_shared(10, 0); + auto decimal_type_38_30 = std::make_shared(38, 30); + auto decimal_type_38_27 = std::make_shared(38, 27); + + auto field_str = field("in_str", utf8()); + auto schema = arrow::schema({field_str}); + auto res_bool = field("res_bool", arrow::boolean()); + + // This is minimal possible expression to reproduce SIGSEGV + // equal(multiply(castDecimal(10), castDecimal(100)), castDECIMAL("foo")) + auto int_literal = TreeExprBuilder::MakeLiteral(static_cast(100)); + auto int_literal_multiply = TreeExprBuilder::MakeLiteral(static_cast(10)); + auto string_literal = TreeExprBuilder::MakeStringLiteral("foo"); + auto cast_multiply_literal = TreeExprBuilder::MakeFunction( + "castDECIMAL", {int_literal_multiply}, decimal_type_10_0); + auto cast_int_literal = + TreeExprBuilder::MakeFunction("castDECIMAL", {int_literal}, decimal_type_38_30); + auto cast_string_func = + TreeExprBuilder::MakeFunction("castDECIMAL", {string_literal}, decimal_type_38_30); + auto multiply_func = TreeExprBuilder::MakeFunction( + "multiply", {cast_multiply_literal, cast_int_literal}, decimal_type_38_27); + auto equal_func = TreeExprBuilder::MakeFunction( + "equal", {multiply_func, cast_string_func}, arrow::boolean()); + auto expr = TreeExprBuilder::MakeExpression(equal_func, res_bool); + + std::shared_ptr projector; + + ASSERT_OK(Projector::Make(schema, {expr}, TestConfiguration(), &projector)); + + int num_records = 1; + auto invalid_in = MakeArrowArrayUtf8({"1.345"}, {true}); + auto in_batch = arrow::RecordBatch::Make(schema, num_records, {invalid_in}); + + arrow::ArrayVector outputs; + auto status = projector->Evaluate(*in_batch, pool_, &outputs); + ASSERT_NOT_OK(status); + ASSERT_THAT(status.message(), ::testing::HasSubstr("not a valid decimal128 number")); +} } // namespace gandiva From df578f7696d85f92f237de347d9804eb8f209d95 Mon Sep 17 00:00:00 2001 From: Sutou Kouhei Date: Fri, 13 Jun 2025 16:10:30 +0900 Subject: [PATCH 47/63] GH-46528: [CI][Dev] Remove "archery lint" (#46686) ### Rationale for this change We can use pre-commit instead of "archery lint" for all linters/formatters. ### What changes are included in this PR? 
* Remove "archery lint" CI job
* Remove "archery lint" related code, including IWYU and clang-tidy, because IWYU and clang-tidy aren't currently used
* Remove "archery lint" related docs, including IWYU and clang-tidy related docs

### Are these changes tested?

Yes.

### Are there any user-facing changes?

No. It only affects developers.

* GitHub Issue: #46528

Lead-authored-by: Sutou Kouhei
Co-authored-by: Sutou Kouhei
Co-authored-by: Bryce Mecum
Signed-off-by: Sutou Kouhei
---
 .github/workflows/dev.yml | 21 -
 .github/workflows/r.yml | 18 -
 .pre-commit-config.yaml | 3 +-
 LICENSE.txt | 12 +-
 NOTICE.txt | 3 -
 ci/docker/linux-apt-lint.dockerfile | 74 -
 ci/scripts/install_iwyu.sh | 48 -
 cpp/CMakeLists.txt | 117 +-
 cpp/build-support/cpplint.py | 6927 -----------------
 cpp/build-support/iwyu/iwyu-filter.awk | 96 -
 cpp/build-support/iwyu/iwyu.sh | 90 -
 cpp/build-support/iwyu/iwyu_tool.py | 280 -
 .../iwyu/mappings/arrow-misc.imp | 61 -
 .../iwyu/mappings/boost-all-private.imp | 4166 ----------
 cpp/build-support/iwyu/mappings/boost-all.imp | 5679 --------------
 .../iwyu/mappings/boost-extra.imp | 23 -
 cpp/build-support/iwyu/mappings/gflags.imp | 20 -
 cpp/build-support/iwyu/mappings/glog.imp | 27 -
 cpp/build-support/iwyu/mappings/gmock.imp | 23 -
 cpp/build-support/iwyu/mappings/gtest.imp | 26 -
 cpp/build-support/lint_cpp_cli.py | 130 -
 cpp/build-support/lint_exclusions.txt | 13 -
 cpp/build-support/lintutils.py | 109 -
 cpp/build-support/run-infer.sh | 48 -
 cpp/build-support/run_clang_format.py | 137 -
 cpp/build-support/run_clang_tidy.py | 126 -
 cpp/build-support/run_cpplint.py | 114 -
 cpp/cmake_modules/DefineOptions.cmake | 7 +-
 dev/archery/archery/cli.py | 61 +-
 dev/archery/archery/lang/cpp.py | 10 +-
 dev/archery/archery/lang/python.py | 25 -
 dev/archery/archery/utils/lint.py | 411 +-
 dev/archery/archery/utils/rat.py | 70 -
 dev/archery/setup.py | 2 -
 docker-compose.yml | 29 -
 docs/source/developers/cpp/development.rst | 121 +-
 .../developers/guide/step_by_step/styling.rst | 9 +-
 .../guide/tutorials/python_tutorial.rst | 20 +-
 docs/source/developers/python.rst | 25 +-
 r/lint.sh | 54 -
 r/tools/lint.R | 26 -
 r/vignettes/developers/workflow.Rmd | 63 +-
 42 files changed, 34 insertions(+), 19290 deletions(-)
 delete mode 100644 ci/docker/linux-apt-lint.dockerfile
 delete mode 100755 ci/scripts/install_iwyu.sh
 delete mode 100755 cpp/build-support/cpplint.py
 delete mode 100644 cpp/build-support/iwyu/iwyu-filter.awk
 delete mode 100755 cpp/build-support/iwyu/iwyu.sh
 delete mode 100755 cpp/build-support/iwyu/iwyu_tool.py
 delete mode 100644 cpp/build-support/iwyu/mappings/arrow-misc.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/boost-all-private.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/boost-all.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/boost-extra.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/gflags.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/glog.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/gmock.imp
 delete mode 100644 cpp/build-support/iwyu/mappings/gtest.imp
 delete mode 100755 cpp/build-support/lint_cpp_cli.py
 delete mode 100644 cpp/build-support/lint_exclusions.txt
 delete mode 100644 cpp/build-support/lintutils.py
 delete mode 100755 cpp/build-support/run-infer.sh
 delete mode 100755 cpp/build-support/run_clang_format.py
 delete mode 100755 cpp/build-support/run_clang_tidy.py
 delete mode 100755 cpp/build-support/run_cpplint.py
 delete mode 100644 dev/archery/archery/utils/rat.py
 delete mode 100755 r/lint.sh
 delete mode 100755 r/tools/lint.R
diff
--git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index d0f1fa91265..a8e7b396760 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -66,27 +66,6 @@ jobs: - name: Run pre-commit run: | pre-commit run --all-files --color=always --show-diff-on-failure - - name: Setup Archery - run: pip install -e dev/archery[docker] - - name: Execute Docker Build - env: - ARCHERY_DOCKER_USER: ${{ secrets.DOCKERHUB_USER }} - ARCHERY_DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - UBUNTU: 22.04 - run: | - source ci/scripts/util_enable_core_dumps.sh - archery docker run -e GITHUB_ACTIONS=true ubuntu-lint - - name: Docker Push - if: >- - success() && - github.event_name == 'push' && - github.repository == 'apache/arrow' && - github.ref_name == 'main' - env: - ARCHERY_DOCKER_USER: ${{ secrets.DOCKERHUB_USER }} - ARCHERY_DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - continue-on-error: true - run: archery docker push ubuntu-lint release: name: Source Release and Merge Script on ${{ matrix.runs-on }} diff --git a/.github/workflows/r.yml b/.github/workflows/r.yml index 092a92cfc3b..33c04b86a01 100644 --- a/.github/workflows/r.yml +++ b/.github/workflows/r.yml @@ -331,24 +331,6 @@ jobs: check_dir = 'check', timeout = 3600 ) - - name: Run lintr - if: ${{ matrix.config.rversion == 'release' }} - env: - NOT_CRAN: "true" - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - shell: Rscript {0} - working-directory: r - run: | - Sys.setenv( - RWINLIB_LOCAL = file.path(Sys.getenv("GITHUB_WORKSPACE"), "r", "windows", "libarrow.zip"), - MAKEFLAGS = paste0("-j", parallel::detectCores()), - ARROW_R_DEV = TRUE, - "_R_CHECK_FORCE_SUGGESTS_" = FALSE - ) - # we use pak for package installation since it is faster, safer and more convenient - pak::local_install() - pak::pak("lintr") - lintr::expect_lint_free() - name: Dump install logs shell: cmd run: cat r/check/arrow.Rcheck/00install.out diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 8ea80794ebc..9598d6d44f6 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -197,8 +197,9 @@ repos: additional_dependencies: - cyclocomp - lintr + - testthat entry: | - Rscript r/tools/lint.R + Rscript -e "Sys.setenv(NOT_CRAN = 'TRUE'); lintr::expect_lint_free('r')" pass_filenames: false files: >- ^r/.*\.(R|Rmd)$ diff --git a/LICENSE.txt b/LICENSE.txt index c1f3c456198..2c90f0313d7 100644 --- a/LICENSE.txt +++ b/LICENSE.txt @@ -783,16 +783,6 @@ License: http://www.apache.org/licenses/LICENSE-2.0 -------------------------------------------------------------------------------- -This project includes code from the Google styleguide. - -* cpp/build-support/cpplint.py is based on the scripts from the Google styleguide. - -Copyright: 2009 Google Inc. All rights reserved. -Homepage: https://github.com/google/styleguide -License: 3-clause BSD - --------------------------------------------------------------------------------- - This project includes code from Snappy. * cpp/cmake_modules/{SnappyCMakeLists.txt,SnappyConfig.h} are based on code @@ -2292,7 +2282,7 @@ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. -------------------------------------------------------------------------------- -The files cpp/src/arrow/vendored/whereami/whereami.h, +The files cpp/src/arrow/vendored/whereami/whereami.h, cpp/src/arrow/vendored/whereami/whereami.cc are adapted from Grégory Pakosz's whereami library (https://github.com/gpakosz/whereami) It is dual licensed under both the WTFPLv2 and MIT licenses. 
diff --git a/NOTICE.txt b/NOTICE.txt index 2089c6fb203..9b98364d2ab 100644 --- a/NOTICE.txt +++ b/NOTICE.txt @@ -17,9 +17,6 @@ https://github.com/libdynd This product includes software from the LLVM project * distributed under the University of Illinois Open Source -This product includes software from the google-lint project - * Copyright (c) 2009 Google Inc. All rights reserved. - This product includes software from the mman-win32 project * Copyright https://code.google.com/p/mman-win32/ * Licensed under the MIT License; diff --git a/ci/docker/linux-apt-lint.dockerfile b/ci/docker/linux-apt-lint.dockerfile deleted file mode 100644 index b73cc585ea7..00000000000 --- a/ci/docker/linux-apt-lint.dockerfile +++ /dev/null @@ -1,74 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -ARG base -FROM hadolint/hadolint:v1.17.2 AS hadolint -FROM ${base} - -ARG clang_tools -RUN apt-get update && \ - apt-get install -y -q \ - clang-${clang_tools} \ - clang-format-${clang_tools} \ - clang-tidy-${clang_tools} \ - clang-tools-${clang_tools} \ - cmake \ - curl \ - libclang-${clang_tools}-dev \ - llvm-${clang_tools}-dev \ - openjdk-11-jdk-headless \ - python3 \ - python3-dev \ - python3-pip \ - ruby \ - apt-transport-https \ - software-properties-common \ - && apt-get clean \ - && rm -rf /var/lib/apt/lists/* - -ARG r=4.4 -RUN wget -qO- https://cloud.r-project.org/bin/linux/ubuntu/marutter_pubkey.asc | \ - tee -a /etc/apt/trusted.gpg.d/cran_ubuntu_key.asc && \ - # NOTE: Only R >= 4.0 is available in this repo - add-apt-repository 'deb https://cloud.r-project.org/bin/linux/ubuntu '$(lsb_release -cs)'-cran40/' && \ - apt-get install -y \ - r-base=${r}* \ - r-recommended=${r}* \ - libxml2-dev - -# Ensure parallel R package installation, set CRAN repo mirror, -# and use pre-built binaries where possible -COPY ci/etc/rprofile /arrow/ci/etc/ -RUN cat /arrow/ci/etc/rprofile >> $(R RHOME)/etc/Rprofile.site -# Also ensure parallel compilation of C/C++ code -RUN echo "MAKEFLAGS=-j$(R -s -e 'cat(parallel::detectCores())')" >> $(R RHOME)/etc/Renviron.site -# We don't need arrow's dependencies, only lintr (and its dependencies) -RUN R -e "install.packages('lintr')" -RUN R -e "install.packages('cyclocomp')" - -# Docker linter -COPY --from=hadolint /bin/hadolint /usr/bin/hadolint - -# IWYU -COPY ci/scripts/install_iwyu.sh /arrow/ci/scripts/ -RUN arrow/ci/scripts/install_iwyu.sh /tmp/iwyu /usr/local ${clang_tools} - -# Use python3 by default in scripts -RUN ln -s /usr/bin/python3 /usr/local/bin/python - -ENV LC_ALL=C.UTF-8 \ - LANG=C.UTF-8 diff --git a/ci/scripts/install_iwyu.sh b/ci/scripts/install_iwyu.sh deleted file mode 100755 index 03e6b92f299..00000000000 --- a/ci/scripts/install_iwyu.sh +++ /dev/null @@ -1,48 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed to the Apache Software 
Foundation (ASF) under one or more -# contributor license agreements. See the NOTICE file distributed with -# this work for additional information regarding copyright ownership. -# The ASF licenses this file to You under the Apache License, Version 2.0 -# (the "License"); you may not use this file except in compliance with -# the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, software -# distributed under the License is distributed on an "AS IS" BASIS, -# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -# See the License for the specific language governing permissions and -# limitations under the License. -# - -set -eu - -source_dir=${1:-/tmp/iwyu} -install_prefix=${2:-/usr/local} -clang_tools_version=${3:-8} - -iwyu_branch_name="clang_${clang_tools_version}" -if [ "${clang_tools_version}" -lt 10 ]; then - iwyu_branch_name="${iwyu_branch_name}.0" -fi - -git clone --single-branch --branch "${iwyu_branch_name}" \ - https://github.com/include-what-you-use/include-what-you-use.git "${source_dir}" - -mkdir -p "${source_dir}/build" -pushd "${source_dir}/build" - -# Build IWYU for current Clang -export CC=clang-${clang_tools_version} -export CXX=clang++-${clang_tools_version} - -cmake -DCMAKE_PREFIX_PATH="/usr/lib/llvm-${clang_tools_version}" \ - -DCMAKE_INSTALL_PREFIX="${install_prefix}" \ - "${source_dir}" -make -j4 -make install - -popd - -rm -rf "${source_dir}" diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt index 9854ac4a32c..3a71e147b54 100644 --- a/cpp/CMakeLists.txt +++ b/cpp/CMakeLists.txt @@ -208,7 +208,7 @@ if("$ENV{CMAKE_EXPORT_COMPILE_COMMANDS}" STREQUAL "1" set(CMAKE_EXPORT_COMPILE_COMMANDS 1) endif() -# Needed for linting targets, etc. +# Needed for Gandiva. # Use the first Python installation on PATH, not the newest one set(Python3_FIND_STRATEGY "LOCATION") # On Windows, use registry last, not first @@ -276,121 +276,6 @@ if(ARROW_OPTIONAL_INSTALL) set(INSTALL_IS_OPTIONAL OPTIONAL) endif() -# -# "make lint" target -# -if(NOT ARROW_VERBOSE_LINT) - set(ARROW_LINT_QUIET "--quiet") -endif() - -if(NOT LINT_EXCLUSIONS_FILE) - # source files matching a glob from a line in this file - # will be excluded from linting (cpplint, clang-tidy, clang-format) - set(LINT_EXCLUSIONS_FILE ${BUILD_SUPPORT_DIR}/lint_exclusions.txt) -endif() - -find_program(CPPLINT_BIN - NAMES cpplint cpplint.py - HINTS ${BUILD_SUPPORT_DIR}) -message(STATUS "Found cpplint executable at ${CPPLINT_BIN}") - -set(COMMON_LINT_OPTIONS - --exclude_globs - ${LINT_EXCLUSIONS_FILE} - --source_dir - ${CMAKE_CURRENT_SOURCE_DIR}/src - --source_dir - ${CMAKE_CURRENT_SOURCE_DIR}/examples - --source_dir - ${CMAKE_CURRENT_SOURCE_DIR}/tools) - -add_custom_target(lint - ${PYTHON_EXECUTABLE} - ${BUILD_SUPPORT_DIR}/run_cpplint.py - --cpplint_binary - ${CPPLINT_BIN} - ${COMMON_LINT_OPTIONS} - ${ARROW_LINT_QUIET} - WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/..) - -# -# "make format" and "make check-format" targets -# -if(${CLANG_FORMAT_FOUND}) - # runs clang format and updates files in place. 
- add_custom_target(format - ${PYTHON_EXECUTABLE} - ${BUILD_SUPPORT_DIR}/run_clang_format.py - --clang_format_binary - ${CLANG_FORMAT_BIN} - ${COMMON_LINT_OPTIONS} - --fix - ${ARROW_LINT_QUIET}) - - # runs clang format and exits with a non-zero exit code if any files need to be reformatted - add_custom_target(check-format - ${PYTHON_EXECUTABLE} - ${BUILD_SUPPORT_DIR}/run_clang_format.py - --clang_format_binary - ${CLANG_FORMAT_BIN} - ${COMMON_LINT_OPTIONS} - ${ARROW_LINT_QUIET}) -endif() - -add_custom_target(lint_cpp_cli ${PYTHON_EXECUTABLE} ${BUILD_SUPPORT_DIR}/lint_cpp_cli.py - ${CMAKE_CURRENT_SOURCE_DIR}/src) - -if(ARROW_LINT_ONLY) - message("ARROW_LINT_ONLY was specified, this is only a partial build directory") - return() -endif() - -# -# "make clang-tidy" and "make check-clang-tidy" targets -# -if(${CLANG_TIDY_FOUND}) - # TODO check to make sure .clang-tidy is being respected - - # runs clang-tidy and attempts to fix any warning automatically - add_custom_target(clang-tidy - ${PYTHON_EXECUTABLE} - ${BUILD_SUPPORT_DIR}/run_clang_tidy.py - --clang_tidy_binary - ${CLANG_TIDY_BIN} - --compile_commands - ${CMAKE_BINARY_DIR}/compile_commands.json - ${COMMON_LINT_OPTIONS} - --fix - ${ARROW_LINT_QUIET}) - - # runs clang-tidy and exits with a non-zero exit code if any errors are found. - add_custom_target(check-clang-tidy - ${PYTHON_EXECUTABLE} - ${BUILD_SUPPORT_DIR}/run_clang_tidy.py - --clang_tidy_binary - ${CLANG_TIDY_BIN} - --compile_commands - ${CMAKE_BINARY_DIR}/compile_commands.json - ${COMMON_LINT_OPTIONS} - ${ARROW_LINT_QUIET}) -endif() - -if(UNIX) - add_custom_target(iwyu - ${CMAKE_COMMAND} - -E - env - "PYTHON=${PYTHON_EXECUTABLE}" - ${BUILD_SUPPORT_DIR}/iwyu/iwyu.sh) - add_custom_target(iwyu-all - ${CMAKE_COMMAND} - -E - env - "PYTHON=${PYTHON_EXECUTABLE}" - ${BUILD_SUPPORT_DIR}/iwyu/iwyu.sh - all) -endif(UNIX) - # datetime code used by iOS requires zlib support if(IOS) set(ARROW_WITH_ZLIB ON) diff --git a/cpp/build-support/cpplint.py b/cpp/build-support/cpplint.py deleted file mode 100755 index dc3d47ba8b4..00000000000 --- a/cpp/build-support/cpplint.py +++ /dev/null @@ -1,6927 +0,0 @@ -#!/usr/bin/env python3 -# -# Copyright (c) 2009 Google Inc. All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -"""Does google-lint on c++ files. - -The goal of this script is to identify places in the code that *may* -be in non-compliance with google style. It does not attempt to fix -up these problems -- the point is to educate. It does also not -attempt to find all problems, or to ensure that everything it does -find is legitimately a problem. - -In particular, we can get very confused by /* and // inside strings! -We do a small hack, which is to ignore //'s with "'s after them on the -same line, but it is far from perfect (in either direction). -""" - -# cpplint predates fstrings -# pylint: disable=consider-using-f-string - -# pylint: disable=invalid-name - -import codecs -import copy -import getopt -import glob -import itertools -import math # for log -import os -import re -import sre_compile -import string -import sys -import sysconfig -import unicodedata -import xml.etree.ElementTree - -# if empty, use defaults -_valid_extensions = set([]) - -__VERSION__ = '1.6.1' - -try: - # -- pylint: disable=used-before-assignment - xrange # Python 2 -except NameError: - # -- pylint: disable=redefined-builtin - xrange = range # Python 3 - - -_USAGE = """ -Syntax: cpplint.py [--verbose=#] [--output=emacs|eclipse|vs7|junit|sed|gsed] - [--filter=-x,+y,...] - [--counting=total|toplevel|detailed] [--root=subdir] - [--repository=path] - [--linelength=digits] [--headers=x,y,...] - [--recursive] - [--exclude=path] - [--extensions=hpp,cpp,...] - [--includeorder=default|standardcfirst] - [--quiet] - [--version] - [file] ... - - Style checker for C/C++ source files. - This is a fork of the Google style checker with minor extensions. - - The style guidelines this tries to follow are those in - https://google.github.io/styleguide/cppguide.html - - Every problem is given a confidence score from 1-5, with 5 meaning we are - certain of the problem, and 1 meaning it could be a legitimate construct. - This will miss some errors, and is not a substitute for a code review. - - To suppress false-positive errors of a certain category, add a - 'NOLINT(category)' comment to the line. NOLINT or NOLINT(*) - suppresses errors of all categories on that line. - - The files passed in will be linted; at least one file must be provided. - Default linted extensions are %s. - Other file types will be ignored. - Change the extensions with the --extensions flag. - - Flags: - - output=emacs|eclipse|vs7|junit|sed|gsed - By default, the output is formatted to ease emacs parsing. Visual Studio - compatible output (vs7) may also be used. Further support exists for - eclipse (eclipse), and JUnit (junit). XML parsers such as those used - in Jenkins and Bamboo may also be used. - The sed format outputs sed commands that should fix some of the errors. - Note that this requires gnu sed. If that is installed as gsed on your - system (common e.g. on macOS with homebrew) you can use the gsed output - format. 
Sed commands are written to stdout, not stderr, so you should be - able to pipe output straight to a shell to run the fixes. - - verbose=# - Specify a number 0-5 to restrict errors to certain verbosity levels. - Errors with lower verbosity levels have lower confidence and are more - likely to be false positives. - - quiet - Don't print anything if no errors are found. - - filter=-x,+y,... - Specify a comma-separated list of category-filters to apply: only - error messages whose category names pass the filters will be printed. - (Category names are printed with the message and look like - "[whitespace/indent]".) Filters are evaluated left to right. - "-FOO" means "do not print categories that start with FOO". - "+FOO" means "do print categories that start with FOO". - - Examples: --filter=-whitespace,+whitespace/braces - --filter=-whitespace,-runtime/printf,+runtime/printf_format - --filter=-,+build/include_what_you_use - - To see a list of all the categories used in cpplint, pass no arg: - --filter= - - counting=total|toplevel|detailed - The total number of errors found is always printed. If - 'toplevel' is provided, then the count of errors in each of - the top-level categories like 'build' and 'whitespace' will - also be printed. If 'detailed' is provided, then a count - is provided for each category like 'build/class'. - - repository=path - The top level directory of the repository, used to derive the header - guard CPP variable. By default, this is determined by searching for a - path that contains .git, .hg, or .svn. When this flag is specified, the - given path is used instead. This option allows the header guard CPP - variable to remain consistent even if members of a team have different - repository root directories (such as when checking out a subdirectory - with SVN). In addition, users of non-mainstream version control systems - can use this flag to ensure readable header guard CPP variables. - - Examples: - Assuming that Alice checks out ProjectName and Bob checks out - ProjectName/trunk and trunk contains src/chrome/ui/browser.h, then - with no --repository flag, the header guard CPP variable will be: - - Alice => TRUNK_SRC_CHROME_BROWSER_UI_BROWSER_H_ - Bob => SRC_CHROME_BROWSER_UI_BROWSER_H_ - - If Alice uses the --repository=trunk flag and Bob omits the flag or - uses --repository=. then the header guard CPP variable will be: - - Alice => SRC_CHROME_BROWSER_UI_BROWSER_H_ - Bob => SRC_CHROME_BROWSER_UI_BROWSER_H_ - - root=subdir - The root directory used for deriving header guard CPP variable. - This directory is relative to the top level directory of the repository - which by default is determined by searching for a directory that contains - .git, .hg, or .svn but can also be controlled with the --repository flag. - If the specified directory does not exist, this flag is ignored. - - Examples: - Assuming that src is the top level directory of the repository (and - cwd=top/src), the header guard CPP variables for - src/chrome/browser/ui/browser.h are: - - No flag => CHROME_BROWSER_UI_BROWSER_H_ - --root=chrome => BROWSER_UI_BROWSER_H_ - --root=chrome/browser => UI_BROWSER_H_ - --root=.. => SRC_CHROME_BROWSER_UI_BROWSER_H_ - - linelength=digits - This is the allowed line length for the project. The default value is - 80 characters. - - Examples: - --linelength=120 - - recursive - Search for files to lint recursively. Each directory given in the list - of files to be linted is replaced by all files that descend from that - directory. 
Files with extensions not in the valid extensions list are - excluded. - - exclude=path - Exclude the given path from the list of files to be linted. Relative - paths are evaluated relative to the current directory and shell globbing - is performed. This flag can be provided multiple times to exclude - multiple files. - - Examples: - --exclude=one.cc - --exclude=src/*.cc - --exclude=src/*.cc --exclude=test/*.cc - - extensions=extension,extension,... - The allowed file extensions that cpplint will check - - Examples: - --extensions=%s - - includeorder=default|standardcfirst - For the build/include_order rule, the default is to blindly assume angle - bracket includes with file extension are c-system-headers (default), - even knowing this will have false classifications. - The default is established at google. - standardcfirst means to instead use an allow-list of known c headers and - treat all others as separate group of "other system headers". The C headers - included are those of the C-standard lib and closely related ones. - - headers=x,y,... - The header extensions that cpplint will treat as .h in checks. Values are - automatically added to --extensions list. - (by default, only files with extensions %s will be assumed to be headers) - - Examples: - --headers=%s - --headers=hpp,hxx - --headers=hpp - - cpplint.py supports per-directory configurations specified in CPPLINT.cfg - files. CPPLINT.cfg file can contain a number of key=value pairs. - Currently the following options are supported: - - set noparent - filter=+filter1,-filter2,... - exclude_files=regex - linelength=80 - root=subdir - headers=x,y,... - - "set noparent" option prevents cpplint from traversing directory tree - upwards looking for more .cfg files in parent directories. This option - is usually placed in the top-level project directory. - - The "filter" option is similar in function to --filter flag. It specifies - message filters in addition to the |_DEFAULT_FILTERS| and those specified - through --filter command-line flag. - - "exclude_files" allows to specify a regular expression to be matched against - a file name. If the expression matches, the file is skipped and not run - through the linter. - - "linelength" allows to specify the allowed line length for the project. - - The "root" option is similar in function to the --root flag (see example - above). Paths are relative to the directory of the CPPLINT.cfg. - - The "headers" option is similar in function to the --headers flag - (see example above). - - CPPLINT.cfg has an effect on files in the same directory and all - sub-directories, unless overridden by a nested configuration file. - - Example file: - filter=-build/include_order,+build/include_alpha - exclude_files=.*\\.cc - - The above example disables build/include_order warning and enables - build/include_alpha as well as excludes all .cc from being - processed by linter, in the current directory (where the .cfg - file is located) and all sub-directories. -""" - -# We categorize each error message we print. Here are the categories. -# We want an explicit list so we can list them all in cpplint --filter=. -# If you add a new error message with a new category, add it to the list -# here! cpplint_unittest.py should tell you if you forget to do this. 
-_ERROR_CATEGORIES = [ - 'build/class', - 'build/c++11', - 'build/c++14', - 'build/c++tr1', - 'build/deprecated', - 'build/endif_comment', - 'build/explicit_make_pair', - 'build/forward_decl', - 'build/header_guard', - 'build/include', - 'build/include_subdir', - 'build/include_alpha', - 'build/include_order', - 'build/include_what_you_use', - 'build/namespaces_headers', - 'build/namespaces_literals', - 'build/namespaces', - 'build/printf_format', - 'build/storage_class', - 'legal/copyright', - 'readability/alt_tokens', - 'readability/braces', - 'readability/casting', - 'readability/check', - 'readability/constructors', - 'readability/fn_size', - 'readability/inheritance', - 'readability/multiline_comment', - 'readability/multiline_string', - 'readability/namespace', - 'readability/nolint', - 'readability/nul', - 'readability/strings', - 'readability/todo', - 'readability/utf8', - 'runtime/arrays', - 'runtime/casting', - 'runtime/explicit', - 'runtime/int', - 'runtime/init', - 'runtime/invalid_increment', - 'runtime/member_string_references', - 'runtime/memset', - 'runtime/indentation_namespace', - 'runtime/operator', - 'runtime/printf', - 'runtime/printf_format', - 'runtime/references', - 'runtime/string', - 'runtime/threadsafe_fn', - 'runtime/vlog', - 'whitespace/blank_line', - 'whitespace/braces', - 'whitespace/comma', - 'whitespace/comments', - 'whitespace/empty_conditional_body', - 'whitespace/empty_if_body', - 'whitespace/empty_loop_body', - 'whitespace/end_of_line', - 'whitespace/ending_newline', - 'whitespace/forcolon', - 'whitespace/indent', - 'whitespace/line_length', - 'whitespace/newline', - 'whitespace/operators', - 'whitespace/parens', - 'whitespace/semicolon', - 'whitespace/tab', - 'whitespace/todo', - ] - -# keywords to use with --outputs which generate stdout for machine processing -_MACHINE_OUTPUTS = [ - 'junit', - 'sed', - 'gsed' -] - -# These error categories are no longer enforced by cpplint, but for backwards- -# compatibility they may still appear in NOLINT comments. -_LEGACY_ERROR_CATEGORIES = [ - 'readability/streams', - 'readability/function', - ] - -# These prefixes for categories should be ignored since they relate to other -# tools which also use the NOLINT syntax, e.g. clang-tidy. -_OTHER_NOLINT_CATEGORY_PREFIXES = [ - 'clang-analyzer', - ] - -# The default state of the category filter. This is overridden by the --filter= -# flag. By default all errors are on, so only add here categories that should be -# off by default (i.e., categories that must be enabled by the --filter= flags). -# All entries here should start with a '-' or '+', as in the --filter= flag. -_DEFAULT_FILTERS = ['-build/include_alpha'] - -# The default list of categories suppressed for C (not C++) files. -_DEFAULT_C_SUPPRESSED_CATEGORIES = [ - 'readability/casting', - ] - -# The default list of categories suppressed for Linux Kernel files. -_DEFAULT_KERNEL_SUPPRESSED_CATEGORIES = [ - 'whitespace/tab', - ] - -# We used to check for high-bit characters, but after much discussion we -# decided those were OK, as long as they were in UTF-8 and didn't represent -# hard-coded international strings, which belong in a separate i18n file. 
- -# C++ headers -_CPP_HEADERS = frozenset([ - # Legacy - 'algobase.h', - 'algo.h', - 'alloc.h', - 'builtinbuf.h', - 'bvector.h', - 'complex.h', - 'defalloc.h', - 'deque.h', - 'editbuf.h', - 'fstream.h', - 'function.h', - 'hash_map', - 'hash_map.h', - 'hash_set', - 'hash_set.h', - 'hashtable.h', - 'heap.h', - 'indstream.h', - 'iomanip.h', - 'iostream.h', - 'istream.h', - 'iterator.h', - 'list.h', - 'map.h', - 'multimap.h', - 'multiset.h', - 'ostream.h', - 'pair.h', - 'parsestream.h', - 'pfstream.h', - 'procbuf.h', - 'pthread_alloc', - 'pthread_alloc.h', - 'rope', - 'rope.h', - 'ropeimpl.h', - 'set.h', - 'slist', - 'slist.h', - 'stack.h', - 'stdiostream.h', - 'stl_alloc.h', - 'stl_relops.h', - 'streambuf.h', - 'stream.h', - 'strfile.h', - 'strstream.h', - 'tempbuf.h', - 'tree.h', - 'type_traits.h', - 'vector.h', - # 17.6.1.2 C++ library headers - 'algorithm', - 'array', - 'atomic', - 'bitset', - 'chrono', - 'codecvt', - 'complex', - 'condition_variable', - 'deque', - 'exception', - 'forward_list', - 'fstream', - 'functional', - 'future', - 'initializer_list', - 'iomanip', - 'ios', - 'iosfwd', - 'iostream', - 'istream', - 'iterator', - 'limits', - 'list', - 'locale', - 'map', - 'memory', - 'mutex', - 'new', - 'numeric', - 'ostream', - 'queue', - 'random', - 'ratio', - 'regex', - 'scoped_allocator', - 'set', - 'sstream', - 'stack', - 'stdexcept', - 'streambuf', - 'string', - 'strstream', - 'system_error', - 'thread', - 'tuple', - 'typeindex', - 'typeinfo', - 'type_traits', - 'unordered_map', - 'unordered_set', - 'utility', - 'valarray', - 'vector', - # 17.6.1.2 C++14 headers - 'shared_mutex', - # 17.6.1.2 C++17 headers - 'any', - 'charconv', - 'codecvt', - 'execution', - 'filesystem', - 'memory_resource', - 'optional', - 'string_view', - 'variant', - # 17.6.1.2 C++ headers for C library facilities - 'cassert', - 'ccomplex', - 'cctype', - 'cerrno', - 'cfenv', - 'cfloat', - 'cinttypes', - 'ciso646', - 'climits', - 'clocale', - 'cmath', - 'csetjmp', - 'csignal', - 'cstdalign', - 'cstdarg', - 'cstdbool', - 'cstddef', - 'cstdint', - 'cstdio', - 'cstdlib', - 'cstring', - 'ctgmath', - 'ctime', - 'cuchar', - 'cwchar', - 'cwctype', - ]) - -# C headers -_C_HEADERS = frozenset([ - # System C headers - 'assert.h', - 'complex.h', - 'ctype.h', - 'errno.h', - 'fenv.h', - 'float.h', - 'inttypes.h', - 'iso646.h', - 'limits.h', - 'locale.h', - 'math.h', - 'setjmp.h', - 'signal.h', - 'stdalign.h', - 'stdarg.h', - 'stdatomic.h', - 'stdbool.h', - 'stddef.h', - 'stdint.h', - 'stdio.h', - 'stdlib.h', - 'stdnoreturn.h', - 'string.h', - 'tgmath.h', - 'threads.h', - 'time.h', - 'uchar.h', - 'wchar.h', - 'wctype.h', - # additional POSIX C headers - 'aio.h', - 'arpa/inet.h', - 'cpio.h', - 'dirent.h', - 'dlfcn.h', - 'fcntl.h', - 'fmtmsg.h', - 'fnmatch.h', - 'ftw.h', - 'glob.h', - 'grp.h', - 'iconv.h', - 'langinfo.h', - 'libgen.h', - 'monetary.h', - 'mqueue.h', - 'ndbm.h', - 'net/if.h', - 'netdb.h', - 'netinet/in.h', - 'netinet/tcp.h', - 'nl_types.h', - 'poll.h', - 'pthread.h', - 'pwd.h', - 'regex.h', - 'sched.h', - 'search.h', - 'semaphore.h', - 'setjmp.h', - 'signal.h', - 'spawn.h', - 'strings.h', - 'stropts.h', - 'syslog.h', - 'tar.h', - 'termios.h', - 'trace.h', - 'ulimit.h', - 'unistd.h', - 'utime.h', - 'utmpx.h', - 'wordexp.h', - # additional GNUlib headers - 'a.out.h', - 'aliases.h', - 'alloca.h', - 'ar.h', - 'argp.h', - 'argz.h', - 'byteswap.h', - 'crypt.h', - 'endian.h', - 'envz.h', - 'err.h', - 'error.h', - 'execinfo.h', - 'fpu_control.h', - 'fstab.h', - 'fts.h', - 'getopt.h', - 'gshadow.h', - 'ieee754.h', - 
'ifaddrs.h', - 'libintl.h', - 'mcheck.h', - 'mntent.h', - 'obstack.h', - 'paths.h', - 'printf.h', - 'pty.h', - 'resolv.h', - 'shadow.h', - 'sysexits.h', - 'ttyent.h', - # Additional linux glibc headers - 'dlfcn.h', - 'elf.h', - 'features.h', - 'gconv.h', - 'gnu-versions.h', - 'lastlog.h', - 'libio.h', - 'link.h', - 'malloc.h', - 'memory.h', - 'netash/ash.h', - 'netatalk/at.h', - 'netax25/ax25.h', - 'neteconet/ec.h', - 'netipx/ipx.h', - 'netiucv/iucv.h', - 'netpacket/packet.h', - 'netrom/netrom.h', - 'netrose/rose.h', - 'nfs/nfs.h', - 'nl_types.h', - 'nss.h', - 're_comp.h', - 'regexp.h', - 'sched.h', - 'sgtty.h', - 'stab.h', - 'stdc-predef.h', - 'stdio_ext.h', - 'syscall.h', - 'termio.h', - 'thread_db.h', - 'ucontext.h', - 'ustat.h', - 'utmp.h', - 'values.h', - 'wait.h', - 'xlocale.h', - # Hardware specific headers - 'arm_neon.h', - 'emmintrin.h', - 'immintrin.h', - 'intrin.h', - 'nmmintrin.h', - 'x86intrin.h', - 'xmmintrin.h', - ]) - -# Folders of C libraries so commonly used in C++, -# that they have parity with standard C libraries. -C_STANDARD_HEADER_FOLDERS = frozenset([ - # standard C library - "sys", - # glibc for linux - "arpa", - "asm-generic", - "bits", - "gnu", - "net", - "netinet", - "protocols", - "rpc", - "rpcsvc", - "scsi", - # linux kernel header - "drm", - "linux", - "misc", - "mtd", - "rdma", - "sound", - "video", - "xen", - ]) - -# Type names -_TYPES = re.compile( - r'^(?:' - # [dcl.type.simple] - r'(char(16_t|32_t)?)|wchar_t|' - r'bool|short|int|long|signed|unsigned|float|double|' - # [support.types] - r'(ptrdiff_t|size_t|max_align_t|nullptr_t)|' - # [cstdint.syn] - r'(u?int(_fast|_least)?(8|16|32|64)_t)|' - r'(u?int(max|ptr)_t)|' - r')$') - - -# These headers are excluded from [build/include] and [build/include_order] -# checks: -# - Anything not following google file name conventions (containing an -# uppercase character, such as Python.h or nsStringAPI.h, for example). -# - Lua headers. -_THIRD_PARTY_HEADERS_PATTERN = re.compile( - r'^(?:[^/]*[A-Z][^/]*\.h|lua\.h|lauxlib\.h|lualib\.h)$') - -# Pattern for matching FileInfo.BaseName() against test file name -_test_suffixes = ['_test', '_regtest', '_unittest'] -_TEST_FILE_SUFFIX = '(' + '|'.join(_test_suffixes) + r')$' - -# Pattern that matches only complete whitespace, possibly across multiple lines. -_EMPTY_CONDITIONAL_BODY_PATTERN = re.compile(r'^\s*$', re.DOTALL) - -# Assertion macros. These are defined in base/logging.h and -# testing/base/public/gunit.h. -_CHECK_MACROS = [ - 'DCHECK', 'CHECK', - 'EXPECT_TRUE', 'ASSERT_TRUE', - 'EXPECT_FALSE', 'ASSERT_FALSE', - ] - -# Replacement macros for CHECK/DCHECK/EXPECT_TRUE/EXPECT_FALSE -_CHECK_REPLACEMENT = dict([(macro_var, {}) for macro_var in _CHECK_MACROS]) - -for op, replacement in [('==', 'EQ'), ('!=', 'NE'), - ('>=', 'GE'), ('>', 'GT'), - ('<=', 'LE'), ('<', 'LT')]: - _CHECK_REPLACEMENT['DCHECK'][op] = 'DCHECK_%s' % replacement - _CHECK_REPLACEMENT['CHECK'][op] = 'CHECK_%s' % replacement - _CHECK_REPLACEMENT['EXPECT_TRUE'][op] = 'EXPECT_%s' % replacement - _CHECK_REPLACEMENT['ASSERT_TRUE'][op] = 'ASSERT_%s' % replacement - -for op, inv_replacement in [('==', 'NE'), ('!=', 'EQ'), - ('>=', 'LT'), ('>', 'LE'), - ('<=', 'GT'), ('<', 'GE')]: - _CHECK_REPLACEMENT['EXPECT_FALSE'][op] = 'EXPECT_%s' % inv_replacement - _CHECK_REPLACEMENT['ASSERT_FALSE'][op] = 'ASSERT_%s' % inv_replacement - -# Alternative tokens and their replacements. For full list, see section 2.5 -# Alternative tokens [lex.digraph] in the C++ standard. 
-# -# Digraphs (such as '%:') are not included here since it's a mess to -# match those on a word boundary. -_ALT_TOKEN_REPLACEMENT = { - 'and': '&&', - 'bitor': '|', - 'or': '||', - 'xor': '^', - 'compl': '~', - 'bitand': '&', - 'and_eq': '&=', - 'or_eq': '|=', - 'xor_eq': '^=', - 'not': '!', - 'not_eq': '!=' - } - -# Compile regular expression that matches all the above keywords. The "[ =()]" -# bit is meant to avoid matching these keywords outside of boolean expressions. -# -# False positives include C-style multi-line comments and multi-line strings -# but those have always been troublesome for cpplint. -_ALT_TOKEN_REPLACEMENT_PATTERN = re.compile( - r'[ =()](' + ('|'.join(_ALT_TOKEN_REPLACEMENT.keys())) + r')(?=[ (]|$)') - - -# These constants define types of headers for use with -# _IncludeState.CheckNextIncludeOrder(). -_C_SYS_HEADER = 1 -_CPP_SYS_HEADER = 2 -_OTHER_SYS_HEADER = 3 -_LIKELY_MY_HEADER = 4 -_POSSIBLE_MY_HEADER = 5 -_OTHER_HEADER = 6 - -# These constants define the current inline assembly state -_NO_ASM = 0 # Outside of inline assembly block -_INSIDE_ASM = 1 # Inside inline assembly block -_END_ASM = 2 # Last line of inline assembly block -_BLOCK_ASM = 3 # The whole block is an inline assembly block - -# Match start of assembly blocks -_MATCH_ASM = re.compile(r'^\s*(?:asm|_asm|__asm|__asm__)' - r'(?:\s+(volatile|__volatile__))?' - r'\s*[{(]') - -# Match strings that indicate we're working on a C (not C++) file. -_SEARCH_C_FILE = re.compile(r'\b(?:LINT_C_FILE|' - r'vim?:\s*.*(\s*|:)filetype=c(\s*|:|$))') - -# Match string that indicates we're working on a Linux Kernel file. -_SEARCH_KERNEL_FILE = re.compile(r'\b(?:LINT_KERNEL_FILE)') - -# Commands for sed to fix the problem -_SED_FIXUPS = { - 'Remove spaces around =': r's/ = /=/', - 'Remove spaces around !=': r's/ != /!=/', - 'Remove space before ( in if (': r's/if (/if(/', - 'Remove space before ( in for (': r's/for (/for(/', - 'Remove space before ( in while (': r's/while (/while(/', - 'Remove space before ( in switch (': r's/switch (/switch(/', - 'Should have a space between // and comment': r's/\/\//\/\/ /', - 'Missing space before {': r's/\([^ ]\){/\1 {/', - 'Tab found, replace by spaces': r's/\t/ /g', - 'Line ends in whitespace. Consider deleting these extra spaces.': r's/\s*$//', - 'You don\'t need a ; after a }': r's/};/}/', - 'Missing space after ,': r's/,\([^ ]\)/, \1/g', -} - -_regexp_compile_cache = {} - -# {str, set(int)}: a map from error categories to sets of linenumbers -# on which those errors are expected and should be suppressed. -_error_suppressions = {} - -# The root directory used for deriving header guard CPP variable. -# This is set by --root flag. -_root = None -_root_debug = False - -# The top level repository directory. If set, _root is calculated relative to -# this directory instead of the directory containing version control artifacts. -# This is set by the --repository flag. -_repository = None - -# Files to exclude from linting. This is set by the --exclude flag. -_excludes = None - -# Whether to suppress all PrintInfo messages, UNRELATED to --quiet flag -_quiet = False - -# The allowed line length of files. -# This is set by --linelength flag. 
-_line_length = 80 - -# This allows using an include order rule different from the default -_include_order = "default" - -try: - # -- pylint: disable=used-before-assignment - unicode -except NameError: - # -- pylint: disable=redefined-builtin - basestring = unicode = str - -try: - # -- pylint: disable=used-before-assignment - long -except NameError: - # -- pylint: disable=redefined-builtin - long = int - -if sys.version_info < (3,): - # -- pylint: disable=no-member - # BINARY_TYPE = str - itervalues = dict.itervalues - iteritems = dict.iteritems -else: - # BINARY_TYPE = bytes - itervalues = dict.values - iteritems = dict.items - -def unicode_escape_decode(x): - if sys.version_info < (3,): - return codecs.unicode_escape_decode(x)[0] - else: - return x - -# Treat all headers starting with 'h' equally: .h, .hpp, .hxx etc. -# This is set by --headers flag. -_hpp_headers = set([]) - -# {str, bool}: a map from error categories to booleans which indicate if the -# category should be suppressed for every line. -_global_error_suppressions = {} - -def ProcessHppHeadersOption(val): - global _hpp_headers - try: - _hpp_headers = {ext.strip() for ext in val.split(',')} - except ValueError: - PrintUsage('Header extensions must be a comma-separated list.') - -def ProcessIncludeOrderOption(val): - if val is None or val == "default": - pass - elif val == "standardcfirst": - global _include_order - _include_order = val - else: - PrintUsage('Invalid includeorder value %s. Expected default|standardcfirst' % val) - -def IsHeaderExtension(file_extension): - return file_extension in GetHeaderExtensions() - -def GetHeaderExtensions(): - if _hpp_headers: - return _hpp_headers - if _valid_extensions: - return {h for h in _valid_extensions if 'h' in h} - return set(['h', 'hh', 'hpp', 'hxx', 'h++', 'cuh']) - -# The allowed extensions for file names -# This is set by --extensions flag -def GetAllExtensions(): - return GetHeaderExtensions().union(_valid_extensions or set( - ['c', 'cc', 'cpp', 'cxx', 'c++', 'cu'])) - -def ProcessExtensionsOption(val): - global _valid_extensions - try: - extensions = [ext.strip() for ext in val.split(',')] - _valid_extensions = set(extensions) - except ValueError: - PrintUsage('Extensions should be a comma-separated list of values; ' - 'for example: extensions=hpp,cpp\n' - 'This could not be parsed: "%s"' % (val,)) - -def GetNonHeaderExtensions(): - return GetAllExtensions().difference(GetHeaderExtensions()) - -def ParseNolintSuppressions(filename, raw_line, linenum, error): - """Updates the global list of line error-suppressions. - - Parses any NOLINT comments on the current line, updating the global - error_suppressions store. Reports an error if the NOLINT comment - was malformed. - - Args: - filename: str, the name of the input file. - raw_line: str, the line of input text, with comments. - linenum: int, the number of the current line. - error: function, an error handler.
- """ - matched = Search(r'\bNOLINT(NEXTLINE)?\b(\([^)]+\))?', raw_line) - if matched: - if matched.group(1): - suppressed_line = linenum + 1 - else: - suppressed_line = linenum - category = matched.group(2) - if category in (None, '(*)'): # => "suppress all" - _error_suppressions.setdefault(None, set()).add(suppressed_line) - else: - if category.startswith('(') and category.endswith(')'): - category = category[1:-1] - if category in _ERROR_CATEGORIES: - _error_suppressions.setdefault(category, set()).add(suppressed_line) - elif any(c for c in _OTHER_NOLINT_CATEGORY_PREFIXES if category.startswith(c)): - # Ignore any categories from other tools. - pass - elif category not in _LEGACY_ERROR_CATEGORIES: - error(filename, linenum, 'readability/nolint', 5, - 'Unknown NOLINT error category: %s' % category) - - -def ProcessGlobalSuppressions(lines): - """Updates the list of global error suppressions. - - Parses any lint directives in the file that have global effect. - - Args: - lines: An array of strings, each representing a line of the file, with the - last element being empty if the file is terminated with a newline. - """ - for line in lines: - if _SEARCH_C_FILE.search(line): - for category in _DEFAULT_C_SUPPRESSED_CATEGORIES: - _global_error_suppressions[category] = True - if _SEARCH_KERNEL_FILE.search(line): - for category in _DEFAULT_KERNEL_SUPPRESSED_CATEGORIES: - _global_error_suppressions[category] = True - - -def ResetNolintSuppressions(): - """Resets the set of NOLINT suppressions to empty.""" - _error_suppressions.clear() - _global_error_suppressions.clear() - - -def IsErrorSuppressedByNolint(category, linenum): - """Returns true if the specified error category is suppressed on this line. - - Consults the global error_suppressions map populated by - ParseNolintSuppressions/ProcessGlobalSuppressions/ResetNolintSuppressions. - - Args: - category: str, the category of the error. - linenum: int, the current line number. - Returns: - bool, True iff the error should be suppressed due to a NOLINT comment or - global suppression. - """ - return (_global_error_suppressions.get(category, False) or - linenum in _error_suppressions.get(category, set()) or - linenum in _error_suppressions.get(None, set())) - - -def Match(pattern, s): - """Matches the string with the pattern, caching the compiled regexp.""" - # The regexp compilation caching is inlined in both Match and Search for - # performance reasons; factoring it out into a separate function turns out - # to be noticeably expensive. - if pattern not in _regexp_compile_cache: - _regexp_compile_cache[pattern] = sre_compile.compile(pattern) - return _regexp_compile_cache[pattern].match(s) - - -def ReplaceAll(pattern, rep, s): - """Replaces instances of pattern in a string with a replacement. - - The compiled regex is kept in a cache shared by Match and Search. 
- - Args: - pattern: regex pattern - rep: replacement text - s: search string - - Returns: - string with replacements made (or original string if no replacements) - """ - if pattern not in _regexp_compile_cache: - _regexp_compile_cache[pattern] = sre_compile.compile(pattern) - return _regexp_compile_cache[pattern].sub(rep, s) - - -def Search(pattern, s): - """Searches the string for the pattern, caching the compiled regexp.""" - if pattern not in _regexp_compile_cache: - _regexp_compile_cache[pattern] = sre_compile.compile(pattern) - return _regexp_compile_cache[pattern].search(s) - - -def _IsSourceExtension(s): - """File extension (excluding dot) matches a source file extension.""" - return s in GetNonHeaderExtensions() - - -class _IncludeState(object): - """Tracks line numbers for includes, and the order in which includes appear. - - include_list contains list of lists of (header, line number) pairs. - It's a lists of lists rather than just one flat list to make it - easier to update across preprocessor boundaries. - - Call CheckNextIncludeOrder() once for each header in the file, passing - in the type constants defined above. Calls in an illegal order will - raise an _IncludeError with an appropriate error message. - - """ - # self._section will move monotonically through this set. If it ever - # needs to move backwards, CheckNextIncludeOrder will raise an error. - _INITIAL_SECTION = 0 - _MY_H_SECTION = 1 - _C_SECTION = 2 - _CPP_SECTION = 3 - _OTHER_SYS_SECTION = 4 - _OTHER_H_SECTION = 5 - - _TYPE_NAMES = { - _C_SYS_HEADER: 'C system header', - _CPP_SYS_HEADER: 'C++ system header', - _OTHER_SYS_HEADER: 'other system header', - _LIKELY_MY_HEADER: 'header this file implements', - _POSSIBLE_MY_HEADER: 'header this file may implement', - _OTHER_HEADER: 'other header', - } - _SECTION_NAMES = { - _INITIAL_SECTION: "... nothing. (This can't be an error.)", - _MY_H_SECTION: 'a header this file implements', - _C_SECTION: 'C system header', - _CPP_SECTION: 'C++ system header', - _OTHER_SYS_SECTION: 'other system header', - _OTHER_H_SECTION: 'other header', - } - - def __init__(self): - self.include_list = [[]] - self._section = None - self._last_header = None - self.ResetSection('') - - def FindHeader(self, header): - """Check if a header has already been included. - - Args: - header: header to check. - Returns: - Line number of previous occurrence, or -1 if the header has not - been seen before. - """ - for section_list in self.include_list: - for f in section_list: - if f[0] == header: - return f[1] - return -1 - - def ResetSection(self, directive): - """Reset section checking for preprocessor directive. - - Args: - directive: preprocessor directive (e.g. "if", "else"). - """ - # The name of the current section. - self._section = self._INITIAL_SECTION - # The path of last found header. - self._last_header = '' - - # Update list of includes. Note that we never pop from the - # include list. - if directive in ('if', 'ifdef', 'ifndef'): - self.include_list.append([]) - elif directive in ('else', 'elif'): - self.include_list[-1] = [] - - def SetLastHeader(self, header_path): - self._last_header = header_path - - def CanonicalizeAlphabeticalOrder(self, header_path): - """Returns a path canonicalized for alphabetical comparison. - - - replaces "-" with "_" so they both cmp the same. - - removes '-inl' since we don't require them to be after the main header. - - lowercase everything, just in case. - - Args: - header_path: Path to be canonicalized. - - Returns: - Canonicalized path. 
- """ - return header_path.replace('-inl.h', '.h').replace('-', '_').lower() - - def IsInAlphabeticalOrder(self, clean_lines, linenum, header_path): - """Check if a header is in alphabetical order with the previous header. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - header_path: Canonicalized header to be checked. - - Returns: - Returns true if the header is in alphabetical order. - """ - # If previous section is different from current section, _last_header will - # be reset to empty string, so it's always less than current header. - # - # If previous line was a blank line, assume that the headers are - # intentionally sorted the way they are. - if (self._last_header > header_path and - Match(r'^\s*#\s*include\b', clean_lines.elided[linenum - 1])): - return False - return True - - def CheckNextIncludeOrder(self, header_type): - """Returns a non-empty error message if the next header is out of order. - - This function also updates the internal state to be ready to check - the next include. - - Args: - header_type: One of the _XXX_HEADER constants defined above. - - Returns: - The empty string if the header is in the right order, or an - error message describing what's wrong. - - """ - error_message = ('Found %s after %s' % - (self._TYPE_NAMES[header_type], - self._SECTION_NAMES[self._section])) - - last_section = self._section - - if header_type == _C_SYS_HEADER: - if self._section <= self._C_SECTION: - self._section = self._C_SECTION - else: - self._last_header = '' - return error_message - elif header_type == _CPP_SYS_HEADER: - if self._section <= self._CPP_SECTION: - self._section = self._CPP_SECTION - else: - self._last_header = '' - return error_message - elif header_type == _OTHER_SYS_HEADER: - if self._section <= self._OTHER_SYS_SECTION: - self._section = self._OTHER_SYS_SECTION - else: - self._last_header = '' - return error_message - elif header_type == _LIKELY_MY_HEADER: - if self._section <= self._MY_H_SECTION: - self._section = self._MY_H_SECTION - else: - self._section = self._OTHER_H_SECTION - elif header_type == _POSSIBLE_MY_HEADER: - if self._section <= self._MY_H_SECTION: - self._section = self._MY_H_SECTION - else: - # This will always be the fallback because we're not sure - # enough that the header is associated with this file. - self._section = self._OTHER_H_SECTION - else: - assert header_type == _OTHER_HEADER - self._section = self._OTHER_H_SECTION - - if last_section != self._section: - self._last_header = '' - - return '' - - -class _CppLintState(object): - """Maintains module-wide state..""" - - def __init__(self): - self.verbose_level = 1 # global setting. - self.error_count = 0 # global count of reported errors - # filters to apply when emitting error messages - self.filters = _DEFAULT_FILTERS[:] - # backup of filter list. Used to restore the state after each file. - self._filters_backup = self.filters[:] - self.counting = 'total' # In what way are we counting errors? - self.errors_by_category = {} # string to int dict storing error counts - self.quiet = False # Suppress non-error messages? - - # output format: - # "emacs" - format that emacs can parse (default) - # "eclipse" - format that eclipse can parse - # "vs7" - format that Microsoft Visual Studio 7 can parse - # "junit" - format that Jenkins, Bamboo, etc can parse - # "sed" - returns a gnu sed command to fix the problem - # "gsed" - like sed, but names the command gsed, e.g. 
for macOS homebrew users - self.output_format = 'emacs' - - # For JUnit output, save errors and failures until the end so that they - # can be written into the XML - self._junit_errors = [] - self._junit_failures = [] - - def SetOutputFormat(self, output_format): - """Sets the output format for errors.""" - self.output_format = output_format - - def SetQuiet(self, quiet): - """Sets the module's quiet settings, and returns the previous setting.""" - last_quiet = self.quiet - self.quiet = quiet - return last_quiet - - def SetVerboseLevel(self, level): - """Sets the module's verbosity, and returns the previous setting.""" - last_verbose_level = self.verbose_level - self.verbose_level = level - return last_verbose_level - - def SetCountingStyle(self, counting_style): - """Sets the module's counting options.""" - self.counting = counting_style - - def SetFilters(self, filters): - """Sets the error-message filters. - - These filters are applied when deciding whether to emit a given - error message. - - Args: - filters: A string of comma-separated filters (eg "+whitespace/indent"). - Each filter should start with + or -; else we die. - - Raises: - ValueError: The comma-separated filters did not all start with '+' or '-'. - E.g. "-,+whitespace,-whitespace/indent,whitespace/badfilter" - """ - # Default filters always have less priority than the flag ones. - self.filters = _DEFAULT_FILTERS[:] - self.AddFilters(filters) - - def AddFilters(self, filters): - """ Adds more filters to the existing list of error-message filters. """ - for filt in filters.split(','): - clean_filt = filt.strip() - if clean_filt: - self.filters.append(clean_filt) - for filt in self.filters: - if not (filt.startswith('+') or filt.startswith('-')): - raise ValueError('Every filter in --filters must start with + or -' - ' (%s does not)' % filt) - - def BackupFilters(self): - """ Saves the current filter list to backup storage.""" - self._filters_backup = self.filters[:] - - def RestoreFilters(self): - """ Restores filters previously backed up.""" - self.filters = self._filters_backup[:] - - def ResetErrorCounts(self): - """Sets the module's error statistic back to zero.""" - self.error_count = 0 - self.errors_by_category = {} - - def IncrementErrorCount(self, category): - """Bumps the module's error statistic.""" - self.error_count += 1 - if self.counting in ('toplevel', 'detailed'): - if self.counting != 'detailed': - category = category.split('/')[0] - if category not in self.errors_by_category: - self.errors_by_category[category] = 0 - self.errors_by_category[category] += 1 - - def PrintErrorCounts(self): - """Print a summary of errors by category, and the total.""" - for category, count in sorted(iteritems(self.errors_by_category)): - self.PrintInfo('Category \'%s\' errors found: %d\n' % - (category, count)) - if self.error_count > 0: - self.PrintInfo('Total errors found: %d\n' % self.error_count) - - def PrintInfo(self, message): - # _quiet does not represent --quiet flag. 
- # Hide info messages from stdout to keep stdout pure for machine consumption - if not _quiet and self.output_format not in _MACHINE_OUTPUTS: - sys.stdout.write(message) - - def PrintError(self, message): - if self.output_format == 'junit': - self._junit_errors.append(message) - else: - sys.stderr.write(message) - - def AddJUnitFailure(self, filename, linenum, message, category, confidence): - self._junit_failures.append((filename, linenum, message, category, - confidence)) - - def FormatJUnitXML(self): - num_errors = len(self._junit_errors) - num_failures = len(self._junit_failures) - - testsuite = xml.etree.ElementTree.Element('testsuite') - testsuite.attrib['errors'] = str(num_errors) - testsuite.attrib['failures'] = str(num_failures) - testsuite.attrib['name'] = 'cpplint' - - if num_errors == 0 and num_failures == 0: - testsuite.attrib['tests'] = str(1) - xml.etree.ElementTree.SubElement(testsuite, 'testcase', name='passed') - - else: - testsuite.attrib['tests'] = str(num_errors + num_failures) - if num_errors > 0: - testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase') - testcase.attrib['name'] = 'errors' - error = xml.etree.ElementTree.SubElement(testcase, 'error') - error.text = '\n'.join(self._junit_errors) - if num_failures > 0: - # Group failures by file - failed_file_order = [] - failures_by_file = {} - for failure in self._junit_failures: - failed_file = failure[0] - if failed_file not in failed_file_order: - failed_file_order.append(failed_file) - failures_by_file[failed_file] = [] - failures_by_file[failed_file].append(failure) - # Create a testcase for each file - for failed_file in failed_file_order: - failures = failures_by_file[failed_file] - testcase = xml.etree.ElementTree.SubElement(testsuite, 'testcase') - testcase.attrib['name'] = failed_file - failure = xml.etree.ElementTree.SubElement(testcase, 'failure') - template = '{0}: {1} [{2}] [{3}]' - texts = [template.format(f[1], f[2], f[3], f[4]) for f in failures] - failure.text = '\n'.join(texts) - - xml_decl = '<?xml version="1.0" encoding="UTF-8" ?>\n' - return xml_decl + xml.etree.ElementTree.tostring(testsuite, 'utf-8').decode('utf-8') - - -_cpplint_state = _CppLintState() - - -def _OutputFormat(): - """Gets the module's output format.""" - return _cpplint_state.output_format - - -def _SetOutputFormat(output_format): - """Sets the module's output format.""" - _cpplint_state.SetOutputFormat(output_format) - -def _Quiet(): - """Returns the module's quiet setting.""" - return _cpplint_state.quiet - -def _SetQuiet(quiet): - """Set the module's quiet status, and return previous setting.""" - return _cpplint_state.SetQuiet(quiet) - - -def _VerboseLevel(): - """Returns the module's verbosity setting.""" - return _cpplint_state.verbose_level - - -def _SetVerboseLevel(level): - """Sets the module's verbosity, and returns the previous setting.""" - return _cpplint_state.SetVerboseLevel(level) - - -def _SetCountingStyle(level): - """Sets the module's counting options.""" - _cpplint_state.SetCountingStyle(level) - - -def _Filters(): - """Returns the module's list of output filters, as a list.""" - return _cpplint_state.filters - - -def _SetFilters(filters): - """Sets the module's error-message filters. - - These filters are applied when deciding whether to emit a given - error message. - - Args: - filters: A string of comma-separated filters (eg "whitespace/indent"). - Each filter should start with + or -; else we die. - """ - _cpplint_state.SetFilters(filters) - -def _AddFilters(filters): - """Adds more filter overrides.
- - Unlike _SetFilters, this function does not reset the current list of filters - available. - - Args: - filters: A string of comma-separated filters (eg "whitespace/indent"). - Each filter should start with + or -; else we die. - """ - _cpplint_state.AddFilters(filters) - -def _BackupFilters(): - """ Saves the current filter list to backup storage.""" - _cpplint_state.BackupFilters() - -def _RestoreFilters(): - """ Restores filters previously backed up.""" - _cpplint_state.RestoreFilters() - -class _FunctionState(object): - """Tracks current function name and the number of lines in its body.""" - - _NORMAL_TRIGGER = 250 # for --v=0, 500 for --v=1, etc. - _TEST_TRIGGER = 400 # about 50% more than _NORMAL_TRIGGER. - - def __init__(self): - self.in_a_function = False - self.lines_in_function = 0 - self.current_function = '' - - def Begin(self, function_name): - """Start analyzing function body. - - Args: - function_name: The name of the function being tracked. - """ - self.in_a_function = True - self.lines_in_function = 0 - self.current_function = function_name - - def Count(self): - """Count line in current function body.""" - if self.in_a_function: - self.lines_in_function += 1 - - def Check(self, error, filename, linenum): - """Report if too many lines in function body. - - Args: - error: The function to call with any errors found. - filename: The name of the current file. - linenum: The number of the line to check. - """ - if not self.in_a_function: - return - - if Match(r'T(EST|est)', self.current_function): - base_trigger = self._TEST_TRIGGER - else: - base_trigger = self._NORMAL_TRIGGER - trigger = base_trigger * 2**_VerboseLevel() - - if self.lines_in_function > trigger: - error_level = int(math.log(self.lines_in_function / base_trigger, 2)) - # 50 => 0, 100 => 1, 200 => 2, 400 => 3, 800 => 4, 1600 => 5, ... - if error_level > 5: - error_level = 5 - error(filename, linenum, 'readability/fn_size', error_level, - 'Small and focused functions are preferred:' - ' %s has %d non-comment lines' - ' (error triggered by exceeding %d lines).' % ( - self.current_function, self.lines_in_function, trigger)) - - def End(self): - """Stop analyzing function body.""" - self.in_a_function = False - - -class _IncludeError(Exception): - """Indicates a problem with the include order in a file.""" - pass - - -class FileInfo(object): - """Provides utility functions for filenames. - - FileInfo provides easy access to the components of a file's path - relative to the project root. - """ - - def __init__(self, filename): - self._filename = filename - - def FullName(self): - """Make Windows paths like Unix.""" - return os.path.abspath(self._filename).replace('\\', '/') - - def RepositoryName(self): - r"""FullName after removing the local path to the repository. - - If we have a real absolute path name here we can try to do something smart: - detecting the root of the checkout and truncating /path/to/checkout from - the name so that we get header guards that don't include things like - "C:\\Documents and Settings\\..." or "/home/username/..." in them and thus - people on different computers who have checked the source out to different - locations won't see bogus errors. 
- """ - fullname = self.FullName() - - if os.path.exists(fullname): - project_dir = os.path.dirname(fullname) - - # If the user specified a repository path, it exists, and the file is - # contained in it, use the specified repository path - if _repository: - repo = FileInfo(_repository).FullName() - root_dir = project_dir - while os.path.exists(root_dir): - # allow case-insensitive compare on Windows - if os.path.normcase(root_dir) == os.path.normcase(repo): - return os.path.relpath(fullname, root_dir).replace('\\', '/') - one_up_dir = os.path.dirname(root_dir) - if one_up_dir == root_dir: - break - root_dir = one_up_dir - - if os.path.exists(os.path.join(project_dir, ".svn")): - # If there's a .svn file in the current directory, we recursively look - # up the directory tree for the top of the SVN checkout - root_dir = project_dir - one_up_dir = os.path.dirname(root_dir) - while os.path.exists(os.path.join(one_up_dir, ".svn")): - root_dir = os.path.dirname(root_dir) - one_up_dir = os.path.dirname(one_up_dir) - - prefix = os.path.commonprefix([root_dir, project_dir]) - return fullname[len(prefix) + 1:] - - # Not SVN <= 1.6? Try to find a git, hg, or svn top level directory by - # searching up from the current path. - root_dir = current_dir = os.path.dirname(fullname) - while current_dir != os.path.dirname(current_dir): - if (os.path.exists(os.path.join(current_dir, ".git")) or - os.path.exists(os.path.join(current_dir, ".hg")) or - os.path.exists(os.path.join(current_dir, ".svn"))): - root_dir = current_dir - current_dir = os.path.dirname(current_dir) - - if (os.path.exists(os.path.join(root_dir, ".git")) or - os.path.exists(os.path.join(root_dir, ".hg")) or - os.path.exists(os.path.join(root_dir, ".svn"))): - prefix = os.path.commonprefix([root_dir, project_dir]) - return fullname[len(prefix) + 1:] - - # Don't know what to do; header guard warnings may be wrong... - return fullname - - def Split(self): - """Splits the file into the directory, basename, and extension. - - For 'chrome/browser/browser.cc', Split() would - return ('chrome/browser', 'browser', '.cc') - - Returns: - A tuple of (directory, basename, extension). - """ - - googlename = self.RepositoryName() - project, rest = os.path.split(googlename) - return (project,) + os.path.splitext(rest) - - def BaseName(self): - """File base name - text after the final slash, before the final period.""" - return self.Split()[1] - - def Extension(self): - """File extension - text following the final period, includes that period.""" - return self.Split()[2] - - def NoExtension(self): - """File has no source file extension.""" - return '/'.join(self.Split()[0:2]) - - def IsSource(self): - """File has a source file extension.""" - return _IsSourceExtension(self.Extension()[1:]) - - -def _ShouldPrintError(category, confidence, linenum): - """If confidence >= verbose, category passes filter and is not suppressed.""" - - # There are three ways we might decide not to print an error message: - # a "NOLINT(category)" comment appears in the source, - # the verbosity level isn't high enough, or the filters filter it out. 
- if IsErrorSuppressedByNolint(category, linenum): - return False - - if confidence < _cpplint_state.verbose_level: - return False - - is_filtered = False - for one_filter in _Filters(): - if one_filter.startswith('-'): - if category.startswith(one_filter[1:]): - is_filtered = True - elif one_filter.startswith('+'): - if category.startswith(one_filter[1:]): - is_filtered = False - else: - assert False # should have been checked for in SetFilter. - if is_filtered: - return False - - return True - - -def Error(filename, linenum, category, confidence, message): - """Logs the fact we've found a lint error. - - We log where the error was found, and also our confidence in the error, - that is, how certain we are this is a legitimate style regression, and - not a misidentification or a use that's sometimes justified. - - False positives can be suppressed by the use of - "cpplint(category)" comments on the offending line. These are - parsed into _error_suppressions. - - Args: - filename: The name of the file containing the error. - linenum: The number of the line containing the error. - category: A string used to describe the "category" this bug - falls under: "whitespace", say, or "runtime". Categories - may have a hierarchy separated by slashes: "whitespace/indent". - confidence: A number from 1-5 representing a confidence score for - the error, with 5 meaning that we are certain of the problem, - and 1 meaning that it could be a legitimate construct. - message: The error message. - """ - if _ShouldPrintError(category, confidence, linenum): - _cpplint_state.IncrementErrorCount(category) - if _cpplint_state.output_format == 'vs7': - _cpplint_state.PrintError('%s(%s): error cpplint: [%s] %s [%d]\n' % ( - filename, linenum, category, message, confidence)) - elif _cpplint_state.output_format == 'eclipse': - sys.stderr.write('%s:%s: warning: %s [%s] [%d]\n' % ( - filename, linenum, message, category, confidence)) - elif _cpplint_state.output_format == 'junit': - _cpplint_state.AddJUnitFailure(filename, linenum, message, category, - confidence) - elif _cpplint_state.output_format in ['sed', 'gsed']: - if message in _SED_FIXUPS: - sys.stdout.write(_cpplint_state.output_format + " -i '%s%s' %s # %s [%s] [%d]\n" % ( - linenum, _SED_FIXUPS[message], filename, message, category, confidence)) - else: - sys.stderr.write('# %s:%s: "%s" [%s] [%d]\n' % ( - filename, linenum, message, category, confidence)) - else: - final_message = '%s:%s: %s [%s] [%d]\n' % ( - filename, linenum, message, category, confidence) - sys.stderr.write(final_message) - -# Matches standard C++ escape sequences per 2.13.2.3 of the C++ standard. -_RE_PATTERN_CLEANSE_LINE_ESCAPES = re.compile( - r'\\([abfnrtv?"\\\']|\d+|x[0-9a-fA-F]+)') -# Match a single C style comment on the same line. -_RE_PATTERN_C_COMMENTS = r'/\*(?:[^*]|\*(?!/))*\*/' -# Matches multi-line C style comments. -# This RE is a little bit more complicated than one might expect, because we -# have to take care of space removals tools so we can handle comments inside -# statements better. -# The current rule is: We only clear spaces from both sides when we're at the -# end of the line. Otherwise, we try to remove spaces from the right side, -# if this doesn't work we try on left side but only if there's a non-character -# on the right. 
-_RE_PATTERN_CLEANSE_LINE_C_COMMENTS = re.compile( - r'(\s*' + _RE_PATTERN_C_COMMENTS + r'\s*$|' + - _RE_PATTERN_C_COMMENTS + r'\s+|' + - r'\s+' + _RE_PATTERN_C_COMMENTS + r'(?=\W)|' + - _RE_PATTERN_C_COMMENTS + r')') - - -def IsCppString(line): - """Does line terminate so, that the next symbol is in string constant. - - This function does not consider comments at all. - - Args: - line: is a partial line of code starting from the 0..n. - - Returns: - True, if next character appended to 'line' is inside a - string constant. - """ - - line = line.replace(r'\\', 'XX') # after this, \\" does not match to \" - return ((line.count('"') - line.count(r'\"') - line.count("'\"'")) & 1) == 1 - - -def CleanseRawStrings(raw_lines): - """Removes C++11 raw strings from lines. - - Before: - static const char kData[] = R"( - multi-line string - )"; - - After: - static const char kData[] = "" - (replaced by blank line) - ""; - - Args: - raw_lines: list of raw lines. - - Returns: - list of lines with C++11 raw strings replaced by empty strings. - """ - - delimiter = None - lines_without_raw_strings = [] - for line in raw_lines: - if delimiter: - # Inside a raw string, look for the end - end = line.find(delimiter) - if end >= 0: - # Found the end of the string, match leading space for this - # line and resume copying the original lines, and also insert - # a "" on the last line. - leading_space = Match(r'^(\s*)\S', line) - line = leading_space.group(1) + '""' + line[end + len(delimiter):] - delimiter = None - else: - # Haven't found the end yet, append a blank line. - line = '""' - - # Look for beginning of a raw string, and replace them with - # empty strings. This is done in a loop to handle multiple raw - # strings on the same line. - while delimiter is None: - # Look for beginning of a raw string. - # See 2.14.15 [lex.string] for syntax. - # - # Once we have matched a raw string, we check the prefix of the - # line to make sure that the line is not part of a single line - # comment. It's done this way because we remove raw strings - # before removing comments as opposed to removing comments - # before removing raw strings. This is because there are some - # cpplint checks that requires the comments to be preserved, but - # we don't want to check comments that are inside raw strings. - matched = Match(r'^(.*?)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line) - if (matched and - not Match(r'^([^\'"]|\'(\\.|[^\'])*\'|"(\\.|[^"])*")*//', - matched.group(1))): - delimiter = ')' + matched.group(2) + '"' - - end = matched.group(3).find(delimiter) - if end >= 0: - # Raw string ended on same line - line = (matched.group(1) + '""' + - matched.group(3)[end + len(delimiter):]) - delimiter = None - else: - # Start of a multi-line raw string - line = matched.group(1) + '""' - else: - break - - lines_without_raw_strings.append(line) - - # TODO(unknown): if delimiter is not None here, we might want to - # emit a warning for unterminated string. 
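The subtle part of CleanseRawStrings is the custom delimiter handling: the closing token is rebuilt as ')' plus the delimiter plus '"', where the delimiter is whatever appeared between the opening quote and parenthesis. A standalone sketch reusing the same regular expression as the function above (everything else here is illustrative scaffolding, not module code):

    import re

    line = 'auto s = R"json({"key": "value"})json";'
    m = re.match(r'^(.*?)\b(?:R|u8R|uR|UR|LR)"([^\s\\()]*)\((.*)$', line)
    closing = ')' + m.group(2) + '"'  # the derived end token, here ')json"'
    end = m.group(3).find(closing)
    print(m.group(1) + '""' + m.group(3)[end + len(closing):])  # auto s = "";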
- return lines_without_raw_strings - - -def FindNextMultiLineCommentStart(lines, lineix): - """Find the beginning marker for a multiline comment.""" - while lineix < len(lines): - if lines[lineix].strip().startswith('/*'): - # Only return this marker if the comment goes beyond this line - if lines[lineix].strip().find('*/', 2) < 0: - return lineix - lineix += 1 - return len(lines) - - -def FindNextMultiLineCommentEnd(lines, lineix): - """We are inside a comment, find the end marker.""" - while lineix < len(lines): - if lines[lineix].strip().endswith('*/'): - return lineix - lineix += 1 - return len(lines) - - -def RemoveMultiLineCommentsFromRange(lines, begin, end): - """Clears a range of lines for multi-line comments.""" - # Having // comments makes the lines non-empty, so we will not get - # unnecessary blank line warnings later in the code. - for i in range(begin, end): - lines[i] = '/**/' - - -def RemoveMultiLineComments(filename, lines, error): - """Removes multiline (c-style) comments from lines.""" - lineix = 0 - while lineix < len(lines): - lineix_begin = FindNextMultiLineCommentStart(lines, lineix) - if lineix_begin >= len(lines): - return - lineix_end = FindNextMultiLineCommentEnd(lines, lineix_begin) - if lineix_end >= len(lines): - error(filename, lineix_begin + 1, 'readability/multiline_comment', 5, - 'Could not find end of multi-line comment') - return - RemoveMultiLineCommentsFromRange(lines, lineix_begin, lineix_end + 1) - lineix = lineix_end + 1 - - -def CleanseComments(line): - """Removes //-comments and single-line C-style /* */ comments. - - Args: - line: A line of C++ source. - - Returns: - The line with single-line comments removed. - """ - commentpos = line.find('//') - if commentpos != -1 and not IsCppString(line[:commentpos]): - line = line[:commentpos].rstrip() - # get rid of /* ... */ - return _RE_PATTERN_CLEANSE_LINE_C_COMMENTS.sub('', line) - - -class CleansedLines(object): - """Holds 4 copies of all lines with different preprocessing applied to them. - - 1) elided member contains lines without strings and comments. - 2) lines member contains lines without comments. - 3) raw_lines member contains all the lines without processing. - 4) lines_without_raw_strings member is the same as raw_lines, but with C++11 raw - strings removed. - All these members are of <type 'list'>, and of the same length. - """ - - def __init__(self, lines): - self.elided = [] - self.lines = [] - self.raw_lines = lines - self.num_lines = len(lines) - self.lines_without_raw_strings = CleanseRawStrings(lines) - # # pylint: disable=consider-using-enumerate - for linenum in range(len(self.lines_without_raw_strings)): - self.lines.append(CleanseComments( - self.lines_without_raw_strings[linenum])) - elided = self._CollapseStrings(self.lines_without_raw_strings[linenum]) - self.elided.append(CleanseComments(elided)) - - def NumLines(self): - """Returns the number of lines represented.""" - return self.num_lines - - @staticmethod - def _CollapseStrings(elided): - """Collapses strings and chars on a line to simple "" or '' blocks. - - We nix strings first so we're not fooled by text like '"http://"' - - Args: - elided: The line being processed. - - Returns: - The line with collapsed strings. - """ - if _RE_PATTERN_INCLUDE.match(elided): - return elided - - # Remove escaped characters first to make quote/single quote collapsing - # basic. Things that look like escaped characters shouldn't occur - # outside of strings and chars.
- elided = _RE_PATTERN_CLEANSE_LINE_ESCAPES.sub('', elided) - - # Replace quoted strings and digit separators. Both single quotes - # and double quotes are processed in the same loop, otherwise - # nested quotes wouldn't work. - collapsed = '' - while True: - # Find the first quote character - match = Match(r'^([^\'"]*)([\'"])(.*)$', elided) - if not match: - collapsed += elided - break - head, quote, tail = match.groups() - - if quote == '"': - # Collapse double quoted strings - second_quote = tail.find('"') - if second_quote >= 0: - collapsed += head + '""' - elided = tail[second_quote + 1:] - else: - # Unmatched double quote, don't bother processing the rest - # of the line since this is probably a multiline string. - collapsed += elided - break - else: - # Found single quote, check nearby text to eliminate digit separators. - # - # There is no special handling for floating point here, because - # the integer/fractional/exponent parts would all be parsed - # correctly as long as there are digits on both sides of the - # separator. So we are fine as long as we don't see something - # like "0.'3" (gcc 4.9.0 will not allow this literal). - if Search(r'\b(?:0[bBxX]?|[1-9])[0-9a-fA-F]*$', head): - match_literal = Match(r'^((?:\'?[0-9a-zA-Z_])*)(.*)$', "'" + tail) - collapsed += head + match_literal.group(1).replace("'", '') - elided = match_literal.group(2) - else: - second_quote = tail.find('\'') - if second_quote >= 0: - collapsed += head + "''" - elided = tail[second_quote + 1:] - else: - # Unmatched single quote - collapsed += elided - break - - return collapsed - - -def FindEndOfExpressionInLine(line, startpos, stack): - """Find the position just after the end of current parenthesized expression. - - Args: - line: a CleansedLines line. - startpos: start searching at this position. - stack: nesting stack at startpos. - - Returns: - On finding matching end: (index just after matching end, None) - On finding an unclosed expression: (-1, None) - Otherwise: (-1, new stack at end of this line) - """ - for i in xrange(startpos, len(line)): - char = line[i] - if char in '([{': - # Found start of parenthesized expression, push to expression stack - stack.append(char) - elif char == '<': - # Found potential start of template argument list - if i > 0 and line[i - 1] == '<': - # Left shift operator - if stack and stack[-1] == '<': - stack.pop() - if not stack: - return (-1, None) - elif i > 0 and Search(r'\boperator\s*$', line[0:i]): - # operator<, don't add to stack - continue - else: - # Tentative start of template argument list - stack.append('<') - elif char in ')]}': - # Found end of parenthesized expression. - # - # If we are currently expecting a matching '>', the pending '<' - # must have been an operator. Remove them from expression stack. - while stack and stack[-1] == '<': - stack.pop() - if not stack: - return (-1, None) - if ((stack[-1] == '(' and char == ')') or - (stack[-1] == '[' and char == ']') or - (stack[-1] == '{' and char == '}')): - stack.pop() - if not stack: - return (i + 1, None) - else: - # Mismatched parentheses - return (-1, None) - elif char == '>': - # Found potential end of template argument list. - - # Ignore "->" and operator functions - if (i > 0 and - (line[i - 1] == '-' or Search(r'\boperator\s*$', line[0:i - 1]))): - continue - - # Pop the stack if there is a matching '<'. Otherwise, ignore - # this '>' since it must be an operator. 
- if stack: - if stack[-1] == '<': - stack.pop() - if not stack: - return (i + 1, None) - elif char == ';': - # Found something that look like end of statements. If we are currently - # expecting a '>', the matching '<' must have been an operator, since - # template argument list should not contain statements. - while stack and stack[-1] == '<': - stack.pop() - if not stack: - return (-1, None) - - # Did not find end of expression or unbalanced parentheses on this line - return (-1, stack) - - -def CloseExpression(clean_lines, linenum, pos): - """If input points to ( or { or [ or <, finds the position that closes it. - - If lines[linenum][pos] points to a '(' or '{' or '[' or '<', finds the - linenum/pos that correspond to the closing of the expression. - - TODO(unknown): cpplint spends a fair bit of time matching parentheses. - Ideally we would want to index all opening and closing parentheses once - and have CloseExpression be just a simple lookup, but due to preprocessor - tricks, this is not so easy. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: A position on the line. - - Returns: - A tuple (line, linenum, pos) pointer *past* the closing brace, or - (line, len(lines), -1) if we never find a close. Note we ignore - strings and comments when matching; and the line we return is the - 'cleansed' line at linenum. - """ - - line = clean_lines.elided[linenum] - if (line[pos] not in '({[<') or Match(r'<[<=]', line[pos:]): - return (line, clean_lines.NumLines(), -1) - - # Check first line - (end_pos, stack) = FindEndOfExpressionInLine(line, pos, []) - if end_pos > -1: - return (line, linenum, end_pos) - - # Continue scanning forward - while stack and linenum < clean_lines.NumLines() - 1: - linenum += 1 - line = clean_lines.elided[linenum] - (end_pos, stack) = FindEndOfExpressionInLine(line, 0, stack) - if end_pos > -1: - return (line, linenum, end_pos) - - # Did not find end of expression before end of file, give up - return (line, clean_lines.NumLines(), -1) - - -def FindStartOfExpressionInLine(line, endpos, stack): - """Find position at the matching start of current expression. - - This is almost the reverse of FindEndOfExpressionInLine, but note - that the input position and returned position differs by 1. - - Args: - line: a CleansedLines line. - endpos: start searching at this position. - stack: nesting stack at endpos. - - Returns: - On finding matching start: (index at matching start, None) - On finding an unclosed expression: (-1, None) - Otherwise: (-1, new stack at beginning of this line) - """ - i = endpos - while i >= 0: - char = line[i] - if char in ')]}': - # Found end of expression, push to expression stack - stack.append(char) - elif char == '>': - # Found potential end of template argument list. - # - # Ignore it if it's a "->" or ">=" or "operator>" - if (i > 0 and - (line[i - 1] == '-' or - Match(r'\s>=\s', line[i - 1:]) or - Search(r'\boperator\s*$', line[0:i]))): - i -= 1 - else: - stack.append('>') - elif char == '<': - # Found potential start of template argument list - if i > 0 and line[i - 1] == '<': - # Left shift operator - i -= 1 - else: - # If there is a matching '>', we can pop the expression stack. - # Otherwise, ignore this '<' since it must be an operator. - if stack and stack[-1] == '>': - stack.pop() - if not stack: - return (i, None) - elif char in '([{': - # Found start of expression. - # - # If there are any unmatched '>' on the stack, they must be - # operators. Remove those. 
- while stack and stack[-1] == '>': - stack.pop() - if not stack: - return (-1, None) - if ((char == '(' and stack[-1] == ')') or - (char == '[' and stack[-1] == ']') or - (char == '{' and stack[-1] == '}')): - stack.pop() - if not stack: - return (i, None) - else: - # Mismatched parentheses - return (-1, None) - elif char == ';': - # Found something that look like end of statements. If we are currently - # expecting a '<', the matching '>' must have been an operator, since - # template argument list should not contain statements. - while stack and stack[-1] == '>': - stack.pop() - if not stack: - return (-1, None) - - i -= 1 - - return (-1, stack) - - -def ReverseCloseExpression(clean_lines, linenum, pos): - """If input points to ) or } or ] or >, finds the position that opens it. - - If lines[linenum][pos] points to a ')' or '}' or ']' or '>', finds the - linenum/pos that correspond to the opening of the expression. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: A position on the line. - - Returns: - A tuple (line, linenum, pos) pointer *at* the opening brace, or - (line, 0, -1) if we never find the matching opening brace. Note - we ignore strings and comments when matching; and the line we - return is the 'cleansed' line at linenum. - """ - line = clean_lines.elided[linenum] - if line[pos] not in ')}]>': - return (line, 0, -1) - - # Check last line - (start_pos, stack) = FindStartOfExpressionInLine(line, pos, []) - if start_pos > -1: - return (line, linenum, start_pos) - - # Continue scanning backward - while stack and linenum > 0: - linenum -= 1 - line = clean_lines.elided[linenum] - (start_pos, stack) = FindStartOfExpressionInLine(line, len(line) - 1, stack) - if start_pos > -1: - return (line, linenum, start_pos) - - # Did not find start of expression before beginning of file, give up - return (line, 0, -1) - - -def CheckForCopyright(filename, lines, error): - """Logs an error if no Copyright message appears at the top of the file.""" - - # We'll say it should occur by line 10. Don't forget there's a - # placeholder line at the front. - for line in xrange(1, min(len(lines), 11)): - if re.search(r'Copyright', lines[line], re.I): break - else: # means no copyright line was found - error(filename, 0, 'legal/copyright', 5, - 'No copyright message found. ' - 'You should have a line: "Copyright [year] <Copyright Owner>"') - - -def GetIndentLevel(line): - """Return the number of leading spaces in line. - - Args: - line: A string to check. - - Returns: - An integer count of leading spaces, possibly zero. - """ - indent = Match(r'^( *)\S', line) - if indent: - return len(indent.group(1)) - else: - return 0 - -def PathSplitToList(path): - """Returns the path split into a list by the separator. - - Args: - path: An absolute or relative path (e.g. '/a/b/c/' or '../a') - - Returns: - A list of path components (e.g. ['a', 'b', 'c']). - """ - lst = [] - while True: - (head, tail) = os.path.split(path) - if head == path: # absolute paths end - lst.append(head) - break - if tail == path: # relative paths end - lst.append(tail) - break - - path = head - lst.append(tail) - - lst.reverse() - return lst - -def GetHeaderGuardCPPVariable(filename): - """Returns the CPP variable that should be used as a header guard. - - Args: - filename: The name of a C++ header file. - - Returns: - The CPP variable that should be used as a header guard in the - named file.
- - """ - - # Restores original filename in case that cpplint is invoked from Emacs's - # flymake. - filename = re.sub(r'_flymake\.h$', '.h', filename) - filename = re.sub(r'/\.flymake/([^/]*)$', r'/\1', filename) - # Replace 'c++' with 'cpp'. - filename = filename.replace('C++', 'cpp').replace('c++', 'cpp') - - fileinfo = FileInfo(filename) - file_path_from_root = fileinfo.RepositoryName() - - def FixupPathFromRoot(): - if _root_debug: - sys.stderr.write("\n_root fixup, _root = '%s', repository name = '%s'\n" - % (_root, fileinfo.RepositoryName())) - - # Process the file path with the --root flag if it was set. - if not _root: - if _root_debug: - sys.stderr.write("_root unspecified\n") - return file_path_from_root - - def StripListPrefix(lst, prefix): - # f(['x', 'y'], ['w, z']) -> None (not a valid prefix) - if lst[:len(prefix)] != prefix: - return None - # f(['a, 'b', 'c', 'd'], ['a', 'b']) -> ['c', 'd'] - return lst[(len(prefix)):] - - # root behavior: - # --root=subdir , lstrips subdir from the header guard - maybe_path = StripListPrefix(PathSplitToList(file_path_from_root), - PathSplitToList(_root)) - - if _root_debug: - sys.stderr.write(("_root lstrip (maybe_path=%s, file_path_from_root=%s," + - " _root=%s)\n") % (maybe_path, file_path_from_root, _root)) - - if maybe_path: - return os.path.join(*maybe_path) - - # --root=.. , will prepend the outer directory to the header guard - full_path = fileinfo.FullName() - # adapt slashes for windows - root_abspath = os.path.abspath(_root).replace('\\', '/') - - maybe_path = StripListPrefix(PathSplitToList(full_path), - PathSplitToList(root_abspath)) - - if _root_debug: - sys.stderr.write(("_root prepend (maybe_path=%s, full_path=%s, " + - "root_abspath=%s)\n") % (maybe_path, full_path, root_abspath)) - - if maybe_path: - return os.path.join(*maybe_path) - - if _root_debug: - sys.stderr.write("_root ignore, returning %s\n" % (file_path_from_root)) - - # --root=FAKE_DIR is ignored - return file_path_from_root - - file_path_from_root = FixupPathFromRoot() - return re.sub(r'[^a-zA-Z0-9]', '_', file_path_from_root).upper() + '_' - - -def CheckForHeaderGuard(filename, clean_lines, error): - """Checks that the file contains a header guard. - - Logs an error if no #ifndef header guard is present. For other - headers, checks that the full pathname is used. - - Args: - filename: The name of the C++ header file. - clean_lines: A CleansedLines instance containing the file. - error: The function to call with any errors found. - """ - - # Don't check for header guards if there are error suppression - # comments somewhere in this file. - # - # Because this is silencing a warning for a nonexistent line, we - # only support the very specific NOLINT(build/header_guard) syntax, - # and not the general NOLINT or NOLINT(*) syntax. - raw_lines = clean_lines.lines_without_raw_strings - for i in raw_lines: - if Search(r'//\s*NOLINT\(build/header_guard\)', i): - return - - # Allow pragma once instead of header guards - for i in raw_lines: - if Search(r'^\s*#pragma\s+once', i): - return - - cppvar = GetHeaderGuardCPPVariable(filename) - - ifndef = '' - ifndef_linenum = 0 - define = '' - endif = '' - endif_linenum = 0 - for linenum, line in enumerate(raw_lines): - linesplit = line.split() - if len(linesplit) >= 2: - # find the first occurrence of #ifndef and #define, save arg - if not ifndef and linesplit[0] == '#ifndef': - # set ifndef to the header guard presented on the #ifndef line. 
- ifndef = linesplit[1] - ifndef_linenum = linenum - if not define and linesplit[0] == '#define': - define = linesplit[1] - # find the last occurrence of #endif, save entire line - if line.startswith('#endif'): - endif = line - endif_linenum = linenum - - if not ifndef or not define or ifndef != define: - error(filename, 0, 'build/header_guard', 5, - 'No #ifndef header guard found, suggested CPP variable is: %s' % - cppvar) - return - - # The guard should be PATH_FILE_H_, but we also allow PATH_FILE_H__ - # for backward compatibility. - if ifndef != cppvar: - error_level = 0 - if ifndef != cppvar + '_': - error_level = 5 - - ParseNolintSuppressions(filename, raw_lines[ifndef_linenum], ifndef_linenum, - error) - error(filename, ifndef_linenum, 'build/header_guard', error_level, - '#ifndef header guard has wrong style, please use: %s' % cppvar) - - # Check for "//" comments on endif line. - ParseNolintSuppressions(filename, raw_lines[endif_linenum], endif_linenum, - error) - match = Match(r'#endif\s*//\s*' + cppvar + r'(_)?\b', endif) - if match: - if match.group(1) == '_': - # Issue low severity warning for deprecated double trailing underscore - error(filename, endif_linenum, 'build/header_guard', 0, - '#endif line should be "#endif // %s"' % cppvar) - return - - # Didn't find the corresponding "//" comment. If this file does not - # contain any "//" comments at all, it could be that the compiler - # only wants "/**/" comments, look for those instead. - no_single_line_comments = True - for i in xrange(1, len(raw_lines) - 1): - line = raw_lines[i] - if Match(r'^(?:(?:\'(?:\.|[^\'])*\')|(?:"(?:\.|[^"])*")|[^\'"])*//', line): - no_single_line_comments = False - break - - if no_single_line_comments: - match = Match(r'#endif\s*/\*\s*' + cppvar + r'(_)?\s*\*/', endif) - if match: - if match.group(1) == '_': - # Low severity warning for double trailing underscore - error(filename, endif_linenum, 'build/header_guard', 0, - '#endif line should be "#endif /* %s */"' % cppvar) - return - - # Didn't find anything - error(filename, endif_linenum, 'build/header_guard', 5, - '#endif line should be "#endif // %s"' % cppvar) - - -def CheckHeaderFileIncluded(filename, include_state, error): - """Logs an error if a source file does not include its header.""" - - # Do not check test files - fileinfo = FileInfo(filename) - if Search(_TEST_FILE_SUFFIX, fileinfo.BaseName()): - return - - for ext in GetHeaderExtensions(): - basefilename = filename[0:len(filename) - len(fileinfo.Extension())] - headerfile = basefilename + '.' + ext - if not os.path.exists(headerfile): - continue - headername = FileInfo(headerfile).RepositoryName() - first_include = None - include_uses_unix_dir_aliases = False - for section_list in include_state.include_list: - for f in section_list: - include_text = f[0] - if "./" in include_text: - include_uses_unix_dir_aliases = True - if headername in include_text or include_text in headername: - return - if not first_include: - first_include = f[1] - - message = '%s should include its header file %s' % (fileinfo.RepositoryName(), headername) - if include_uses_unix_dir_aliases: - message += ". Relative paths like . and .. are not allowed." - - error(filename, first_include, 'build/include', 5, message) - - -def CheckForBadCharacters(filename, lines, error): - """Logs an error for each line containing bad characters. - - Two kinds of bad characters: - - 1. 
Unicode replacement characters: These indicate that either the file - contained invalid UTF-8 (likely) or Unicode replacement characters (which - it shouldn't). Note that it's possible for this to throw off line - numbering if the invalid UTF-8 occurred adjacent to a newline. - - 2. NUL bytes. These are problematic for some tools. - - Args: - filename: The name of the current file. - lines: An array of strings, each representing a line of the file. - error: The function to call with any errors found. - """ - for linenum, line in enumerate(lines): - if unicode_escape_decode('\ufffd') in line: - error(filename, linenum, 'readability/utf8', 5, - 'Line contains invalid UTF-8 (or Unicode replacement character).') - if '\0' in line: - error(filename, linenum, 'readability/nul', 5, 'Line contains NUL byte.') - - -def CheckForNewlineAtEOF(filename, lines, error): - """Logs an error if there is no newline char at the end of the file. - - Args: - filename: The name of the current file. - lines: An array of strings, each representing a line of the file. - error: The function to call with any errors found. - """ - - # The array lines() was created by adding two newlines to the - # original file (go figure), then splitting on \n. - # To verify that the file ends in \n, we just have to make sure the - # last-but-two element of lines() exists and is empty. - if len(lines) < 3 or lines[-2]: - error(filename, len(lines) - 2, 'whitespace/ending_newline', 5, - 'Could not find a newline character at the end of the file.') - - -def CheckForMultilineCommentsAndStrings(filename, clean_lines, linenum, error): - """Logs an error if we see /* ... */ or "..." that extend past one line. - - /* ... */ comments are legit inside macros, for one line. - Otherwise, we prefer // comments, so it's ok to warn about the - other. Likewise, it's ok for strings to extend across multiple - lines, as long as a line continuation character (backslash) - terminates each line. Although not currently prohibited by the C++ - style guide, it's ugly and unnecessary. We don't do well with either - in this lint program, so we warn about both. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Remove all \\ (escaped backslashes) from the line. They are OK, and the - # second (escaped) slash may trigger later \" detection erroneously. - line = line.replace('\\\\', '') - - if line.count('/*') > line.count('*/'): - error(filename, linenum, 'readability/multiline_comment', 5, - 'Complex multi-line /*...*/-style comment found. ' - 'Lint may give bogus warnings. ' - 'Consider replacing these with //-style comments, ' - 'with #if 0...#endif, ' - 'or with more clearly structured multi-line comments.') - - if (line.count('"') - line.count('\\"')) % 2: - error(filename, linenum, 'readability/multiline_string', 5, - 'Multi-line string ("...") found. This lint script doesn\'t ' - 'do well with such strings, and may give bogus warnings. ' - 'Use C++11 raw strings or concatenation instead.') - - -# (non-threadsafe name, thread-safe alternative, validation pattern) -# -# The validation pattern is used to eliminate false positives such as: -# _rand(); // false positive due to substring match. -# ->rand(); // some member function rand(). -# ACMRandom rand(seed); // some variable named rand. -# ISAACRandom rand(); // another variable named rand. 
-# -# Basically we require the return value of these functions to be used -# in some expression context on the same line by matching on some -# operator before the function name. This eliminates constructors and -# member function calls. -_UNSAFE_FUNC_PREFIX = r'(?:[-+*/=%^&|(<]\s*|>\s+)' -_THREADING_LIST = ( - ('asctime(', 'asctime_r(', _UNSAFE_FUNC_PREFIX + r'asctime\([^)]+\)'), - ('ctime(', 'ctime_r(', _UNSAFE_FUNC_PREFIX + r'ctime\([^)]+\)'), - ('getgrgid(', 'getgrgid_r(', _UNSAFE_FUNC_PREFIX + r'getgrgid\([^)]+\)'), - ('getgrnam(', 'getgrnam_r(', _UNSAFE_FUNC_PREFIX + r'getgrnam\([^)]+\)'), - ('getlogin(', 'getlogin_r(', _UNSAFE_FUNC_PREFIX + r'getlogin\(\)'), - ('getpwnam(', 'getpwnam_r(', _UNSAFE_FUNC_PREFIX + r'getpwnam\([^)]+\)'), - ('getpwuid(', 'getpwuid_r(', _UNSAFE_FUNC_PREFIX + r'getpwuid\([^)]+\)'), - ('gmtime(', 'gmtime_r(', _UNSAFE_FUNC_PREFIX + r'gmtime\([^)]+\)'), - ('localtime(', 'localtime_r(', _UNSAFE_FUNC_PREFIX + r'localtime\([^)]+\)'), - ('rand(', 'rand_r(', _UNSAFE_FUNC_PREFIX + r'rand\(\)'), - ('strtok(', 'strtok_r(', - _UNSAFE_FUNC_PREFIX + r'strtok\([^)]+\)'), - ('ttyname(', 'ttyname_r(', _UNSAFE_FUNC_PREFIX + r'ttyname\([^)]+\)'), - ) - - -def CheckPosixThreading(filename, clean_lines, linenum, error): - """Checks for calls to thread-unsafe functions. - - Much code has been originally written without consideration of - multi-threading. Also, engineers are relying on their old experience; - they have learned posix before threading extensions were added. These - tests guide the engineers to use thread-safe functions (when using - posix directly). - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - for single_thread_func, multithread_safe_func, pattern in _THREADING_LIST: - # Additional pattern matching check to confirm that this is the - # function we are looking for - if Search(pattern, line): - error(filename, linenum, 'runtime/threadsafe_fn', 2, - 'Consider using ' + multithread_safe_func + - '...) instead of ' + single_thread_func + - '...) for improved thread safety.') - - -def CheckVlogArguments(filename, clean_lines, linenum, error): - """Checks that VLOG() is only used for defining a logging level. - - For example, VLOG(2) is correct. VLOG(INFO), VLOG(WARNING), VLOG(ERROR), and - VLOG(FATAL) are not. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - if Search(r'\bVLOG\((INFO|ERROR|WARNING|DFATAL|FATAL)\)', line): - error(filename, linenum, 'runtime/vlog', 5, - 'VLOG() should be used with numeric verbosity level. ' - 'Use LOG() if you want symbolic severity levels.') - -# Matches invalid increment: *count++, which moves pointer instead of -# incrementing a value. -_RE_PATTERN_INVALID_INCREMENT = re.compile( - r'^\s*\*\w+(\+\+|--);') - - -def CheckInvalidIncrement(filename, clean_lines, linenum, error): - """Checks for invalid increment *count++. - - For example following function: - void increment_counter(int* count) { - *count++; - } - is invalid, because it effectively does count++, moving pointer, and should - be replaced with ++*count, (*count)++ or *count += 1. - - Args: - filename: The name of the current file. 
- clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - if _RE_PATTERN_INVALID_INCREMENT.match(line): - error(filename, linenum, 'runtime/invalid_increment', 5, - 'Changing pointer instead of value (or unused value of operator*).') - - -def IsMacroDefinition(clean_lines, linenum): - if Search(r'^#define', clean_lines[linenum]): - return True - - if linenum > 0 and Search(r'\\$', clean_lines[linenum - 1]): - return True - - return False - - -def IsForwardClassDeclaration(clean_lines, linenum): - return Match(r'^\s*(\btemplate\b)*.*class\s+\w+;\s*$', clean_lines[linenum]) - - -class _BlockInfo(object): - """Stores information about a generic block of code.""" - - def __init__(self, linenum, seen_open_brace): - self.starting_linenum = linenum - self.seen_open_brace = seen_open_brace - self.open_parentheses = 0 - self.inline_asm = _NO_ASM - self.check_namespace_indentation = False - - def CheckBegin(self, filename, clean_lines, linenum, error): - """Run checks that applies to text up to the opening brace. - - This is mostly for checking the text after the class identifier - and the "{", usually where the base class is specified. For other - blocks, there isn't much to check, so we always pass. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - pass - - def CheckEnd(self, filename, clean_lines, linenum, error): - """Run checks that applies to text after the closing brace. - - This is mostly used for checking end of namespace comments. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - pass - - def IsBlockInfo(self): - """Returns true if this block is a _BlockInfo. - - This is convenient for verifying that an object is an instance of - a _BlockInfo, but not an instance of any of the derived classes. - - Returns: - True for this class, False for derived classes. - """ - return self.__class__ == _BlockInfo - - -class _ExternCInfo(_BlockInfo): - """Stores information about an 'extern "C"' block.""" - - def __init__(self, linenum): - _BlockInfo.__init__(self, linenum, True) - - -class _ClassInfo(_BlockInfo): - """Stores information about a class.""" - - def __init__(self, name, class_or_struct, clean_lines, linenum): - _BlockInfo.__init__(self, linenum, False) - self.name = name - self.is_derived = False - self.check_namespace_indentation = True - if class_or_struct == 'struct': - self.access = 'public' - self.is_struct = True - else: - self.access = 'private' - self.is_struct = False - - # Remember initial indentation level for this class. Using raw_lines here - # instead of elided to account for leading comments. - self.class_indent = GetIndentLevel(clean_lines.raw_lines[linenum]) - - # Try to find the end of the class. This will be confused by things like: - # class A { - # } *x = { ... - # - # But it's still good enough for CheckSectionSpacing. 
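The end-of-class scan that the next lines implement is plain brace accounting: walk forward from the declaration keeping a running depth, and take the first line where the depth returns to zero as the class's last line. Standalone, and with a hypothetical name (find_block_end is not part of cpplint), the same idea looks like:

    def find_block_end(lines, start):
        # Running brace depth; the first return to zero closes the block.
        depth = 0
        for i in range(start, len(lines)):
            depth += lines[i].count('{') - lines[i].count('}')
            if not depth:
                return i
        return 0  # unterminated block, mirroring the last_line = 0 default below

    assert find_block_end(['class A {', '  int x;', '};'], 0) == 2

As the comment above concedes, constructs like "} *x = { ..." fool this counter, but it is accurate enough for the section-spacing check that consumes last_line.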
- self.last_line = 0 - depth = 0 - for i in range(linenum, clean_lines.NumLines()): - line = clean_lines.elided[i] - depth += line.count('{') - line.count('}') - if not depth: - self.last_line = i - break - - def CheckBegin(self, filename, clean_lines, linenum, error): - # Look for a bare ':' - if Search('(^|[^:]):($|[^:])', clean_lines.elided[linenum]): - self.is_derived = True - - def CheckEnd(self, filename, clean_lines, linenum, error): - # If there is a DISALLOW macro, it should appear near the end of - # the class. - seen_last_thing_in_class = False - for i in xrange(linenum - 1, self.starting_linenum, -1): - match = Search( - r'\b(DISALLOW_COPY_AND_ASSIGN|DISALLOW_IMPLICIT_CONSTRUCTORS)\(' + - self.name + r'\)', - clean_lines.elided[i]) - if match: - if seen_last_thing_in_class: - error(filename, i, 'readability/constructors', 3, - match.group(1) + ' should be the last thing in the class') - break - - if not Match(r'^\s*$', clean_lines.elided[i]): - seen_last_thing_in_class = True - - # Check that closing brace is aligned with beginning of the class. - # Only do this if the closing brace is indented by only whitespaces. - # This means we will not check single-line class definitions. - indent = Match(r'^( *)\}', clean_lines.elided[linenum]) - if indent and len(indent.group(1)) != self.class_indent: - if self.is_struct: - parent = 'struct ' + self.name - else: - parent = 'class ' + self.name - error(filename, linenum, 'whitespace/indent', 3, - 'Closing brace should be aligned with beginning of %s' % parent) - - -class _NamespaceInfo(_BlockInfo): - """Stores information about a namespace.""" - - def __init__(self, name, linenum): - _BlockInfo.__init__(self, linenum, False) - self.name = name or '' - self.check_namespace_indentation = True - - def CheckEnd(self, filename, clean_lines, linenum, error): - """Check end of namespace comments.""" - line = clean_lines.raw_lines[linenum] - - # Check how many lines is enclosed in this namespace. Don't issue - # warning for missing namespace comments if there aren't enough - # lines. However, do apply checks if there is already an end of - # namespace comment and it's incorrect. - # - # TODO(unknown): We always want to check end of namespace comments - # if a namespace is large, but sometimes we also want to apply the - # check if a short namespace contained nontrivial things (something - # other than forward declarations). There is currently no logic on - # deciding what these nontrivial things are, so this check is - # triggered by namespace size only, which works most of the time. - if (linenum - self.starting_linenum < 10 - and not Match(r'^\s*};*\s*(//|/\*).*\bnamespace\b', line)): - return - - # Look for matching comment at end of namespace. - # - # Note that we accept C style "/* */" comments for terminating - # namespaces, so that code that terminate namespaces inside - # preprocessor macros can be cpplint clean. - # - # We also accept stuff like "// end of namespace ." with the - # period at the end. - # - # Besides these, we don't accept anything else, otherwise we might - # get false negatives when existing comment is a substring of the - # expected namespace. 
- if self.name: - # Named namespace - if not Match((r'^\s*};*\s*(//|/\*).*\bnamespace\s+' + - re.escape(self.name) + r'[\*/\.\\\s]*$'), - line): - error(filename, linenum, 'readability/namespace', 5, - 'Namespace should be terminated with "// namespace %s"' % - self.name) - else: - # Anonymous namespace - if not Match(r'^\s*};*\s*(//|/\*).*\bnamespace[\*/\.\\\s]*$', line): - # If "// namespace anonymous" or "// anonymous namespace (more text)", - # mention "// anonymous namespace" as an acceptable form - if Match(r'^\s*}.*\b(namespace anonymous|anonymous namespace)\b', line): - error(filename, linenum, 'readability/namespace', 5, - 'Anonymous namespace should be terminated with "// namespace"' - ' or "// anonymous namespace"') - else: - error(filename, linenum, 'readability/namespace', 5, - 'Anonymous namespace should be terminated with "// namespace"') - - -class _PreprocessorInfo(object): - """Stores checkpoints of nesting stacks when #if/#else is seen.""" - - def __init__(self, stack_before_if): - # The entire nesting stack before #if - self.stack_before_if = stack_before_if - - # The entire nesting stack up to #else - self.stack_before_else = [] - - # Whether we have already seen #else or #elif - self.seen_else = False - - -class NestingState(object): - """Holds states related to parsing braces.""" - - def __init__(self): - # Stack for tracking all braces. An object is pushed whenever we - # see a "{", and popped when we see a "}". Only 3 types of - # objects are possible: - # - _ClassInfo: a class or struct. - # - _NamespaceInfo: a namespace. - # - _BlockInfo: some other type of block. - self.stack = [] - - # Top of the previous stack before each Update(). - # - # Because the nesting_stack is updated at the end of each line, we - # had to do some convoluted checks to find out what is the current - # scope at the beginning of the line. This check is simplified by - # saving the previous top of nesting stack. - # - # We could save the full stack, but we only need the top. Copying - # the full nesting stack would slow down cpplint by ~10%. - self.previous_stack_top = [] - - # Stack of _PreprocessorInfo objects. - self.pp_stack = [] - - def SeenOpenBrace(self): - """Check if we have seen the opening brace for the innermost block. - - Returns: - True if we have seen the opening brace, False if the innermost - block is still expecting an opening brace. - """ - return (not self.stack) or self.stack[-1].seen_open_brace - - def InNamespaceBody(self): - """Check if we are currently one level inside a namespace body. - - Returns: - True if top of the stack is a namespace block, False otherwise. - """ - return self.stack and isinstance(self.stack[-1], _NamespaceInfo) - - def InExternC(self): - """Check if we are currently one level inside an 'extern "C"' block. - - Returns: - True if top of the stack is an extern block, False otherwise. - """ - return self.stack and isinstance(self.stack[-1], _ExternCInfo) - - def InClassDeclaration(self): - """Check if we are currently one level inside a class or struct declaration. - - Returns: - True if top of the stack is a class/struct, False otherwise. - """ - return self.stack and isinstance(self.stack[-1], _ClassInfo) - - def InAsmBlock(self): - """Check if we are currently one level inside an inline ASM block. - - Returns: - True if the top of the stack is a block containing inline ASM. 
- """ - return self.stack and self.stack[-1].inline_asm != _NO_ASM - - def InTemplateArgumentList(self, clean_lines, linenum, pos): - """Check if current position is inside template argument list. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - pos: position just after the suspected template argument. - Returns: - True if (linenum, pos) is inside template arguments. - """ - while linenum < clean_lines.NumLines(): - # Find the earliest character that might indicate a template argument - line = clean_lines.elided[linenum] - match = Match(r'^[^{};=\[\]\.<>]*(.)', line[pos:]) - if not match: - linenum += 1 - pos = 0 - continue - token = match.group(1) - pos += len(match.group(0)) - - # These things do not look like template argument list: - # class Suspect { - # class Suspect x; } - if token in ('{', '}', ';'): return False - - # These things look like template argument list: - # template - # template - # template - # template - if token in ('>', '=', '[', ']', '.'): return True - - # Check if token is an unmatched '<'. - # If not, move on to the next character. - if token != '<': - pos += 1 - if pos >= len(line): - linenum += 1 - pos = 0 - continue - - # We can't be sure if we just find a single '<', and need to - # find the matching '>'. - (_, end_line, end_pos) = CloseExpression(clean_lines, linenum, pos - 1) - if end_pos < 0: - # Not sure if template argument list or syntax error in file - return False - linenum = end_line - pos = end_pos - return False - - def UpdatePreprocessor(self, line): - """Update preprocessor stack. - - We need to handle preprocessors due to classes like this: - #ifdef SWIG - struct ResultDetailsPageElementExtensionPoint { - #else - struct ResultDetailsPageElementExtensionPoint : public Extension { - #endif - - We make the following assumptions (good enough for most files): - - Preprocessor condition evaluates to true from #if up to first - #else/#elif/#endif. - - - Preprocessor condition evaluates to false from #else/#elif up - to #endif. We still perform lint checks on these lines, but - these do not affect nesting stack. - - Args: - line: current line to check. - """ - if Match(r'^\s*#\s*(if|ifdef|ifndef)\b', line): - # Beginning of #if block, save the nesting stack here. The saved - # stack will allow us to restore the parsing state in the #else case. - self.pp_stack.append(_PreprocessorInfo(copy.deepcopy(self.stack))) - elif Match(r'^\s*#\s*(else|elif)\b', line): - # Beginning of #else block - if self.pp_stack: - if not self.pp_stack[-1].seen_else: - # This is the first #else or #elif block. Remember the - # whole nesting stack up to this point. This is what we - # keep after the #endif. - self.pp_stack[-1].seen_else = True - self.pp_stack[-1].stack_before_else = copy.deepcopy(self.stack) - - # Restore the stack to how it was before the #if - self.stack = copy.deepcopy(self.pp_stack[-1].stack_before_if) - else: - # TODO(unknown): unexpected #else, issue warning? - pass - elif Match(r'^\s*#\s*endif\b', line): - # End of #if or #else blocks. - if self.pp_stack: - # If we saw an #else, we will need to restore the nesting - # stack to its former state before the #else, otherwise we - # will just continue from where we left off. - if self.pp_stack[-1].seen_else: - # Here we can just use a shallow copy since we are the last - # reference to it. 
- self.stack = self.pp_stack[-1].stack_before_else - # Drop the corresponding #if - self.pp_stack.pop() - else: - # TODO(unknown): unexpected #endif, issue warning? - pass - - # TODO(unknown): Update() is too long, but we will refactor later. - def Update(self, filename, clean_lines, linenum, error): - """Update nesting state with current line. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Remember top of the previous nesting stack. - # - # The stack is always pushed/popped and not modified in place, so - # we can just do a shallow copy instead of copy.deepcopy. Using - # deepcopy would slow down cpplint by ~28%. - if self.stack: - self.previous_stack_top = self.stack[-1] - else: - self.previous_stack_top = None - - # Update pp_stack - self.UpdatePreprocessor(line) - - # Count parentheses. This is to avoid adding struct arguments to - # the nesting stack. - if self.stack: - inner_block = self.stack[-1] - depth_change = line.count('(') - line.count(')') - inner_block.open_parentheses += depth_change - - # Also check if we are starting or ending an inline assembly block. - if inner_block.inline_asm in (_NO_ASM, _END_ASM): - if (depth_change != 0 and - inner_block.open_parentheses == 1 and - _MATCH_ASM.match(line)): - # Enter assembly block - inner_block.inline_asm = _INSIDE_ASM - else: - # Not entering assembly block. If previous line was _END_ASM, - # we will now shift to _NO_ASM state. - inner_block.inline_asm = _NO_ASM - elif (inner_block.inline_asm == _INSIDE_ASM and - inner_block.open_parentheses == 0): - # Exit assembly block - inner_block.inline_asm = _END_ASM - - # Consume namespace declaration at the beginning of the line. Do - # this in a loop so that we catch same line declarations like this: - # namespace proto2 { namespace bridge { class MessageSet; } } - while True: - # Match start of namespace. The "\b\s*" below catches namespace - # declarations even if it weren't followed by a whitespace, this - # is so that we don't confuse our namespace checker. The - # missing spaces will be flagged by CheckSpacing. - namespace_decl_match = Match(r'^\s*namespace\b\s*([:\w]+)?(.*)$', line) - if not namespace_decl_match: - break - - new_namespace = _NamespaceInfo(namespace_decl_match.group(1), linenum) - self.stack.append(new_namespace) - - line = namespace_decl_match.group(2) - if line.find('{') != -1: - new_namespace.seen_open_brace = True - line = line[line.find('{') + 1:] - - # Look for a class declaration in whatever is left of the line - # after parsing namespaces. The regexp accounts for decorated classes - # such as in: - # class LOCKABLE API Object { - # }; - class_decl_match = Match( - r'^(\s*(?:template\s*<[\w\s<>,:=]*>\s*)?' - r'(class|struct)\s+(?:[a-zA-Z0-9_]+\s+)*(\w+(?:::\w+)*))' - r'(.*)$', line) - if (class_decl_match and - (not self.stack or self.stack[-1].open_parentheses == 0)): - # We do not want to accept classes that are actually template arguments: - # template <class Ignore1, - # class Ignore2 = Default<args>, - # template <typename> class Ignore3> - # void Function() {}; - # - # To avoid template argument cases, we scan forward and look for - # an unmatched '>'. If we see one, assume we are inside a - # template argument list.
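The forward scan just described can be sketched in isolation. A minimal stand-in (in_template_args is a hypothetical name; cpplint's real check is the InTemplateArgumentList method defined earlier, which also follows arguments across lines via CloseExpression): an unmatched '>' ahead of the token means the 'class' keyword was part of a template argument list, while a statement-like token means it was a real declaration.

    def in_template_args(text):
        depth = 0
        for ch in text:
            if ch == '<':
                depth += 1
            elif ch == '>':
                if depth == 0:
                    return True   # unmatched '>': inside template arguments
                depth -= 1
            elif ch in '{};':
                return False      # statement territory: a real declaration
        return False

    assert in_template_args(' Ignore1> void F();')       # text after 'template <class'
    assert not in_template_args(' MyClass { int x; };')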
- end_declaration = len(class_decl_match.group(1)) - if not self.InTemplateArgumentList(clean_lines, linenum, end_declaration): - self.stack.append(_ClassInfo( - class_decl_match.group(3), class_decl_match.group(2), - clean_lines, linenum)) - line = class_decl_match.group(4) - - # If we have not yet seen the opening brace for the innermost block, - # run checks here. - if not self.SeenOpenBrace(): - self.stack[-1].CheckBegin(filename, clean_lines, linenum, error) - - # Update access control if we are inside a class/struct - if self.stack and isinstance(self.stack[-1], _ClassInfo): - classinfo = self.stack[-1] - access_match = Match( - r'^(.*)\b(public|private|protected|signals)(\s+(?:slots\s*)?)?' - r':(?:[^:]|$)', - line) - if access_match: - classinfo.access = access_match.group(2) - - # Check that access keywords are indented +1 space. Skip this - # check if the keywords are not preceded by whitespaces. - indent = access_match.group(1) - if (len(indent) != classinfo.class_indent + 1 and - Match(r'^\s*$', indent)): - if classinfo.is_struct: - parent = 'struct ' + classinfo.name - else: - parent = 'class ' + classinfo.name - slots = '' - if access_match.group(3): - slots = access_match.group(3) - error(filename, linenum, 'whitespace/indent', 3, - '%s%s: should be indented +1 space inside %s' % ( - access_match.group(2), slots, parent)) - - # Consume braces or semicolons from what's left of the line - while True: - # Match first brace, semicolon, or closed parenthesis. - matched = Match(r'^[^{;)}]*([{;)}])(.*)$', line) - if not matched: - break - - token = matched.group(1) - if token == '{': - # If namespace or class hasn't seen a opening brace yet, mark - # namespace/class head as complete. Push a new block onto the - # stack otherwise. - if not self.SeenOpenBrace(): - self.stack[-1].seen_open_brace = True - elif Match(r'^extern\s*"[^"]*"\s*\{', line): - self.stack.append(_ExternCInfo(linenum)) - else: - self.stack.append(_BlockInfo(linenum, True)) - if _MATCH_ASM.match(line): - self.stack[-1].inline_asm = _BLOCK_ASM - - elif token == ';' or token == ')': - # If we haven't seen an opening brace yet, but we already saw - # a semicolon, this is probably a forward declaration. Pop - # the stack for these. - # - # Similarly, if we haven't seen an opening brace yet, but we - # already saw a closing parenthesis, then these are probably - # function arguments with extra "class" or "struct" keywords. - # Also pop these stack for these. - if not self.SeenOpenBrace(): - self.stack.pop() - else: # token == '}' - # Perform end of block checks and pop the stack. - if self.stack: - self.stack[-1].CheckEnd(filename, clean_lines, linenum, error) - self.stack.pop() - line = matched.group(2) - - def InnermostClass(self): - """Get class info on the top of the stack. - - Returns: - A _ClassInfo object if we are inside a class, or None otherwise. - """ - for i in range(len(self.stack), 0, -1): - classinfo = self.stack[i - 1] - if isinstance(classinfo, _ClassInfo): - return classinfo - return None - - def CheckCompletedBlocks(self, filename, error): - """Checks that all classes and namespaces have been completely parsed. - - Call this when all lines in a file have been processed. - Args: - filename: The name of the current file. - error: The function to call with any errors found. - """ - # Note: This test can result in false positives if #ifdef constructs - # get in the way of brace matching. See the testBuildClass test in - # cpplint_unittest.py for an example of this. 
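A concrete shape of the false positive mentioned in that note (a contrived, hypothetical snippet): each preprocessor branch is balanced by itself, but a single pass over every line miscounts the braces and leaves the declaration looking unterminated.

    # The SWIG-style declaration from UpdatePreprocessor's docstring:
    SNIPPET = [
        '#ifdef SWIG',
        'struct Point {',
        '#else',
        'struct Point : public Base {',
        '#endif',
        '};',
    ]
    # A naive pass over every line sees two '{' for one '}', so the
    # struct would be reported as never completed. The checkpoint and
    # restore in UpdatePreprocessor exists to avoid exactly this; the
    # residual cases it cannot untangle are the false positives the
    # note above warns about.
    depth = sum(l.count('{') - l.count('}') for l in SNIPPET)
    assert depth == 1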
- for obj in self.stack: - if isinstance(obj, _ClassInfo): - error(filename, obj.starting_linenum, 'build/class', 5, - 'Failed to find complete declaration of class %s' % - obj.name) - elif isinstance(obj, _NamespaceInfo): - error(filename, obj.starting_linenum, 'build/namespaces', 5, - 'Failed to find complete declaration of namespace %s' % - obj.name) - - -def CheckForNonStandardConstructs(filename, clean_lines, linenum, - nesting_state, error): - r"""Logs an error if we see certain non-ANSI constructs ignored by gcc-2. - - Complain about several constructs which gcc-2 accepts, but which are - not standard C++. Warning about these in lint is one way to ease the - transition to new compilers. - - put storage class first (e.g. "static const" instead of "const static"). - - "%lld" instead of "%qd" in printf-type functions. - - "%1$d" is non-standard in printf-type functions. - - "\%" is an undefined character escape sequence. - - text after #endif is not allowed. - - invalid inner-style forward declaration. - - >? and <? operators, and their >?= and <?= cousins. - - Additionally, check for constructor/destructor style violations and - reference members, as it is very convenient to do so while checking for - gcc-2 compliance. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: A callable to which errors are reported, which takes 4 arguments: - filename, line number, error level, and message - """ - - # Remove comments from the line, but leave in strings for now. - line = clean_lines.lines[linenum] - - if Search(r'printf\s*\(.*".*%[-+ ]?\d*q', line): - error(filename, linenum, 'runtime/printf_format', 3, - '%q in format strings is deprecated. Use %ll instead.') - - if Search(r'printf\s*\(.*".*%\d+\$', line): - error(filename, linenum, 'runtime/printf_format', 2, - '%N$ formats are unconventional. Try rewriting to avoid them.') - - # Remove escaped backslashes before looking for undefined escape sequences. - line = line.replace('\\\\', '') - - if Search(r'("|\').*\\(%|\[|\(|{)', line): - error(filename, linenum, 'build/printf_format', 3, - '%, [, (, and { are undefined character escapes. Unescape them.') - - # For the rest, work with both comments and strings removed. - line = clean_lines.elided[linenum] - - if Search(r'\b(const|volatile|void|char|short|int|long' - r'|float|double|signed|unsigned' - r'|schar|u?int8|u?int16|u?int32|u?int64)' - r'\s+(register|static|extern|typedef)\b', - line): - error(filename, linenum, 'build/storage_class', 5, - 'Storage-class specifier (static, extern, typedef, etc) should be ' - 'at the beginning of the declaration.') - - if Match(r'\s*#\s*endif\s*[^/\s]+', line): - error(filename, linenum, 'build/endif_comment', 5, - 'Uncommented text after #endif is non-standard. Use a comment.') - - if Match(r'\s*class\s+(\w+\s*::\s*)+\w+\s*;', line): - error(filename, linenum, 'build/forward_decl', 5, - 'Inner-style forward declarations are invalid. Remove this line.') - - if Search(r'(\w+|[+-]?\d+(\.\d*)?)\s*(<|>)\?=?\s*(\w+|[+-]?\d+)(\.\d*)?', - line): - error(filename, linenum, 'build/deprecated', 3, - '>? and <? (max and min) operators are non-standard and deprecated.') - - if Search(r'^\s*const\s*string\s*&\s*\w+\s*;', line): - # TODO(unknown): Could it be expanded safely to arbitrary references, - # without triggering too many false positives? The first - # attempt triggered 5 warnings for mostly benign code in the regtest, hence - # the restriction. - # Here's the original regexp, for the reference: - # type_name = r'\w+((\s*::\s*\w+)|(\s*<\s*\w+?\s*>))?' - # r'\s*const\s*' + type_name + '\s*&\s*\w+\s*;' - error(filename, linenum, 'runtime/member_string_references', 2, - 'const string& members are dangerous. It is much better to use ' - 'alternatives, such as pointers or simple constants.') - - # Everything else in this function operates on class declarations. - # Return early if the top of the nesting stack is not a class, or if - # the class head is not completed yet. - classinfo = nesting_state.InnermostClass() - if not classinfo or not classinfo.seen_open_brace: - return - - # The class may have been declared with namespace or classname qualifiers. - # The constructor and destructor will not have those qualifiers. - base_classname = classinfo.name.split('::')[-1] - - # Look for single-argument constructors that aren't marked explicit. - # Technically a valid construct, but against style. - explicit_constructor_match = Match( - r'\s+(?:(?:inline|constexpr)\s+)*(explicit\s+)?' - r'(?:(?:inline|constexpr)\s+)*%s\s*' - r'\(((?:[^()]|\([^()]*\))*)\)' - % re.escape(base_classname), - line) - - if explicit_constructor_match: - is_marked_explicit = explicit_constructor_match.group(1) - - if not explicit_constructor_match.group(2): - constructor_args = [] - else: - constructor_args = explicit_constructor_match.group(2).split(',') - - # collapse arguments so that commas in template parameter lists and function - # argument parameter lists don't split arguments in two - i = 0 - while i < len(constructor_args): - constructor_arg = constructor_args[i] - while (constructor_arg.count('<') > constructor_arg.count('>') or - constructor_arg.count('(') > constructor_arg.count(')')): - constructor_arg += ',' + constructor_args[i + 1] - del constructor_args[i + 1] - constructor_args[i] = constructor_arg - i += 1 - - variadic_args = [arg for arg in constructor_args if '&&...'
in arg] - defaulted_args = [arg for arg in constructor_args if '=' in arg] - noarg_constructor = (not constructor_args or # empty arg list - # 'void' arg specifier - (len(constructor_args) == 1 and - constructor_args[0].strip() == 'void')) - onearg_constructor = ((len(constructor_args) == 1 and # exactly one arg - not noarg_constructor) or - # all but at most one arg defaulted - (len(constructor_args) >= 1 and - not noarg_constructor and - len(defaulted_args) >= len(constructor_args) - 1) or - # variadic arguments with zero or one argument - (len(constructor_args) <= 2 and - len(variadic_args) >= 1)) - initializer_list_constructor = bool( - onearg_constructor and - Search(r'\bstd\s*::\s*initializer_list\b', constructor_args[0])) - copy_constructor = bool( - onearg_constructor and - Match(r'((const\s+(volatile\s+)?)?|(volatile\s+(const\s+)?))?' - r'%s(\s*<[^>]*>)?(\s+const)?\s*(?:<\w+>\s*)?&' - % re.escape(base_classname), constructor_args[0].strip())) - - if (not is_marked_explicit and - onearg_constructor and - not initializer_list_constructor and - not copy_constructor): - if defaulted_args or variadic_args: - error(filename, linenum, 'runtime/explicit', 5, - 'Constructors callable with one argument ' - 'should be marked explicit.') - else: - error(filename, linenum, 'runtime/explicit', 5, - 'Single-parameter constructors should be marked explicit.') - elif is_marked_explicit and not onearg_constructor: - if noarg_constructor: - error(filename, linenum, 'runtime/explicit', 5, - 'Zero-parameter constructors should not be marked explicit.') - - -def CheckSpacingForFunctionCall(filename, clean_lines, linenum, error): - """Checks for the correctness of various spacing around function calls. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Since function calls often occur inside if/for/while/switch - # expressions - which have their own, more liberal conventions - we - # first see if we should be looking inside such an expression for a - # function call, to which we can apply more strict standards. - fncall = line # if there's no control flow construct, look at whole line - for pattern in (r'\bif\s*\((.*)\)\s*{', - r'\bfor\s*\((.*)\)\s*{', - r'\bwhile\s*\((.*)\)\s*[{;]', - r'\bswitch\s*\((.*)\)\s*{'): - match = Search(pattern, line) - if match: - fncall = match.group(1) # look inside the parens for function calls - break - - # Except in if/for/while/switch, there should never be space - # immediately inside parens (eg "f( 3, 4 )"). We make an exception - # for nested parens ( (a+b) + c ). Likewise, there should never be - # a space before a ( when it's a function argument. I assume it's a - # function argument when the char before the whitespace is legal in - # a function name (alnum + _) and we're not starting a macro. Also ignore - # pointers and references to arrays and functions coz they're too tricky: - # we use a very simple way to recognize these: - # " (something)(maybe-something)" or - # " (something)(maybe-something," or - # " (something)[something]" - # Note that we assume the contents of [] to be short enough that - # they'll never need to wrap. - if ( # Ignore control structures. - not Search(r'\b(if|elif|for|while|switch|return|new|delete|catch|sizeof)\b', - fncall) and - # Ignore pointers/references to functions. 
- not Search(r' \([^)]+\)\([^)]*(\)|,$)', fncall) and - # Ignore pointers/references to arrays. - not Search(r' \([^)]+\)\[[^\]]+\]', fncall)): - if Search(r'\w\s*\(\s(?!\s*\\$)', fncall): # a ( used for a fn call - error(filename, linenum, 'whitespace/parens', 4, - 'Extra space after ( in function call') - elif Search(r'\(\s+(?!(\s*\\)|\()', fncall): - error(filename, linenum, 'whitespace/parens', 2, - 'Extra space after (') - if (Search(r'\w\s+\(', fncall) and - not Search(r'_{0,2}asm_{0,2}\s+_{0,2}volatile_{0,2}\s+\(', fncall) and - not Search(r'#\s*define|typedef|using\s+\w+\s*=', fncall) and - not Search(r'\w\s+\((\w+::)*\*\w+\)\(', fncall) and - not Search(r'\bcase\s+\(', fncall)): - # TODO(unknown): Space after an operator function seem to be a common - # error, silence those for now by restricting them to highest verbosity. - if Search(r'\boperator_*\b', line): - error(filename, linenum, 'whitespace/parens', 0, - 'Extra space before ( in function call') - else: - error(filename, linenum, 'whitespace/parens', 4, - 'Extra space before ( in function call') - # If the ) is followed only by a newline or a { + newline, assume it's - # part of a control statement (if/while/etc), and don't complain - if Search(r'[^)]\s+\)\s*[^{\s]', fncall): - # If the closing parenthesis is preceded by only whitespaces, - # try to give a more descriptive error message. - if Search(r'^\s+\)', fncall): - error(filename, linenum, 'whitespace/parens', 2, - 'Closing ) should be moved to the previous line') - else: - error(filename, linenum, 'whitespace/parens', 2, - 'Extra space before )') - - -def IsBlankLine(line): - """Returns true if the given line is blank. - - We consider a line to be blank if the line is empty or consists of - only white spaces. - - Args: - line: A line of a string. - - Returns: - True, if the given line is blank. - """ - return not line or line.isspace() - - -def CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line, - error): - is_namespace_indent_item = ( - len(nesting_state.stack) > 1 and - nesting_state.stack[-1].check_namespace_indentation and - isinstance(nesting_state.previous_stack_top, _NamespaceInfo) and - nesting_state.previous_stack_top == nesting_state.stack[-2]) - - if ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item, - clean_lines.elided, line): - CheckItemIndentationInNamespace(filename, clean_lines.elided, - line, error) - - -def CheckForFunctionLengths(filename, clean_lines, linenum, - function_state, error): - """Reports for long function bodies. - - For an overview why this is done, see: - https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Write_Short_Functions - - Uses a simplistic algorithm assuming other style guidelines - (especially spacing) are followed. - Only checks unindented functions, so class members are unchecked. - Trivial bodies are unchecked, so constructors with huge initializer lists - may be missed. - Blank/comment lines are not counted so as to avoid encouraging the removal - of vertical space and comments just to get through a lint check. - NOLINT *on the last line of a function* disables this check. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - function_state: Current function name and lines in body so far. - error: The function to call with any errors found. 
- """ - lines = clean_lines.lines - line = lines[linenum] - joined_line = '' - - starting_func = False - regexp = r'(\w(\w|::|\*|\&|\s)*)\(' # decls * & space::name( ... - match_result = Match(regexp, line) - if match_result: - # If the name is all caps and underscores, figure it's a macro and - # ignore it, unless it's TEST or TEST_F. - function_name = match_result.group(1).split()[-1] - if function_name == 'TEST' or function_name == 'TEST_F' or ( - not Match(r'[A-Z_]+$', function_name)): - starting_func = True - - if starting_func: - body_found = False - for start_linenum in xrange(linenum, clean_lines.NumLines()): - start_line = lines[start_linenum] - joined_line += ' ' + start_line.lstrip() - if Search(r'(;|})', start_line): # Declarations and trivial functions - body_found = True - break # ... ignore - if Search(r'{', start_line): - body_found = True - function = Search(r'((\w|:)*)\(', line).group(1) - if Match(r'TEST', function): # Handle TEST... macros - parameter_regexp = Search(r'(\(.*\))', joined_line) - if parameter_regexp: # Ignore bad syntax - function += parameter_regexp.group(1) - else: - function += '()' - function_state.Begin(function) - break - if not body_found: - # No body for the function (or evidence of a non-function) was found. - error(filename, linenum, 'readability/fn_size', 5, - 'Lint failed to find start of function body.') - elif Match(r'^\}\s*$', line): # function end - function_state.Check(error, filename, linenum) - function_state.End() - elif not Match(r'^\s*$', line): - function_state.Count() # Count non-blank/non-comment lines. - - -_RE_PATTERN_TODO = re.compile(r'^//(\s*)TODO(\(.+?\))?:?(\s|$)?') - - -def CheckComment(line, filename, linenum, next_line_start, error): - """Checks for common mistakes in comments. - - Args: - line: The line in question. - filename: The name of the current file. - linenum: The number of the line to check. - next_line_start: The first non-whitespace column of the next line. - error: The function to call with any errors found. - """ - commentpos = line.find('//') - if commentpos != -1: - # Check if the // may be in quotes. If so, ignore it - if re.sub(r'\\.', '', line[0:commentpos]).count('"') % 2 == 0: - # Allow one space for new scopes, two spaces otherwise: - if (not (Match(r'^.*{ *//', line) and next_line_start == commentpos) and - ((commentpos >= 1 and - line[commentpos-1] not in string.whitespace) or - (commentpos >= 2 and - line[commentpos-2] not in string.whitespace))): - error(filename, linenum, 'whitespace/comments', 2, - 'At least two spaces is best between code and comments') - - # Checks for common mistakes in TODO comments. - comment = line[commentpos:] - match = _RE_PATTERN_TODO.match(comment) - if match: - # One whitespace is correct; zero whitespace is handled elsewhere. 
- leading_whitespace = match.group(1) - if len(leading_whitespace) > 1: - error(filename, linenum, 'whitespace/todo', 2, - 'Too many spaces before TODO') - - username = match.group(2) - if not username: - error(filename, linenum, 'readability/todo', 2, - 'Missing username in TODO; it should look like ' - '"// TODO(my_username): Stuff."') - - middle_whitespace = match.group(3) - # Comparisons made explicit for correctness -- pylint: disable=g-explicit-bool-comparison - if middle_whitespace != ' ' and middle_whitespace != '': - error(filename, linenum, 'whitespace/todo', 2, - 'TODO(my_username) should be followed by a space') - - # If the comment contains an alphanumeric character, there - # should be a space somewhere between it and the // unless - # it's a /// or //! Doxygen comment. - if (Match(r'//[^ ]*\w', comment) and - not Match(r'(///|//\!)(\s+|$)', comment)): - error(filename, linenum, 'whitespace/comments', 4, - 'Should have a space between // and comment') - - -def CheckSpacing(filename, clean_lines, linenum, nesting_state, error): - """Checks for the correctness of various spacing issues in the code. - - Things we check for: spaces around operators, spaces after - if/for/while/switch, no spaces around parens in function calls, two - spaces between code and comment, don't start a block with a blank - line, don't end a function with a blank line, don't add a blank line - after public/protected/private, don't have too many blank lines in a row. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - - # Don't use "elided" lines here, otherwise we can't check commented lines. - # Don't want to use "raw" either, because we don't want to check inside C++11 - # raw strings, - raw = clean_lines.lines_without_raw_strings - line = raw[linenum] - - # Before nixing comments, check if the line is blank for no good - # reason. This includes the first line after a block is opened, and - # blank lines at the end of a function (ie, right before a line like '}' - # - # Skip all the blank line checks if we are immediately inside a - # namespace body. In other words, don't issue blank line warnings - # for this block: - # namespace { - # - # } - # - # A warning about missing end of namespace comments will be issued instead. - # - # Also skip blank line checks for 'extern "C"' blocks, which are formatted - # like namespaces. - if (IsBlankLine(line) and - not nesting_state.InNamespaceBody() and - not nesting_state.InExternC()): - elided = clean_lines.elided - prev_line = elided[linenum - 1] - prevbrace = prev_line.rfind('{') - # TODO(unknown): Don't complain if line before blank line, and line after, - # both start with alnums and are indented the same amount. - # This ignores whitespace at the start of a namespace block - # because those are not usually indented. - if prevbrace != -1 and prev_line[prevbrace:].find('}') == -1: - # OK, we have a blank line at the start of a code block. Before we - # complain, we check if it is an exception to the rule: The previous - # non-empty line has the parameters of a function header that are indented - # 4 spaces (because they did not fit in a 80 column line when placed on - # the same line as the function name). 
We also check for the case where - # the previous line is indented 6 spaces, which may happen when the - # initializers of a constructor do not fit into a 80 column line. - exception = False - if Match(r' {6}\w', prev_line): # Initializer list? - # We are looking for the opening column of initializer list, which - # should be indented 4 spaces to cause 6 space indentation afterwards. - search_position = linenum-2 - while (search_position >= 0 - and Match(r' {6}\w', elided[search_position])): - search_position -= 1 - exception = (search_position >= 0 - and elided[search_position][:5] == ' :') - else: - # Search for the function arguments or an initializer list. We use a - # simple heuristic here: If the line is indented 4 spaces; and we have a - # closing paren, without the opening paren, followed by an opening brace - # or colon (for initializer lists) we assume that it is the last line of - # a function header. If we have a colon indented 4 spaces, it is an - # initializer list. - exception = (Match(r' {4}\w[^\(]*\)\s*(const\s*)?(\{\s*$|:)', - prev_line) - or Match(r' {4}:', prev_line)) - - if not exception: - error(filename, linenum, 'whitespace/blank_line', 2, - 'Redundant blank line at the start of a code block ' - 'should be deleted.') - # Ignore blank lines at the end of a block in a long if-else - # chain, like this: - # if (condition1) { - # // Something followed by a blank line - # - # } else if (condition2) { - # // Something else - # } - if linenum + 1 < clean_lines.NumLines(): - next_line = raw[linenum + 1] - if (next_line - and Match(r'\s*}', next_line) - and next_line.find('} else ') == -1): - error(filename, linenum, 'whitespace/blank_line', 3, - 'Redundant blank line at the end of a code block ' - 'should be deleted.') - - matched = Match(r'\s*(public|protected|private):', prev_line) - if matched: - error(filename, linenum, 'whitespace/blank_line', 3, - 'Do not leave a blank line after "%s:"' % matched.group(1)) - - # Next, check comments - next_line_start = 0 - if linenum + 1 < clean_lines.NumLines(): - next_line = raw[linenum + 1] - next_line_start = len(next_line) - len(next_line.lstrip()) - CheckComment(line, filename, linenum, next_line_start, error) - - # get rid of comments and strings - line = clean_lines.elided[linenum] - - # You shouldn't have spaces before your brackets, except for C++11 attributes - # or maybe after 'delete []', 'return []() {};', or 'auto [abc, ...] = ...;'. - if (Search(r'\w\s+\[(?!\[)', line) and - not Search(r'(?:auto&?|delete|return)\s+\[', line)): - error(filename, linenum, 'whitespace/braces', 5, - 'Extra space before [') - - # In range-based for, we wanted spaces before and after the colon, but - # not around "::" tokens that might appear. - if (Search(r'for *\(.*[^:]:[^: ]', line) or - Search(r'for *\(.*[^: ]:[^:]', line)): - error(filename, linenum, 'whitespace/forcolon', 2, - 'Missing space around colon in range-based for loop') - - -def CheckOperatorSpacing(filename, clean_lines, linenum, error): - """Checks for horizontal spacing around operators. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Don't try to do spacing checks for operator methods. Do this by - # replacing the troublesome characters with something else, - # preserving column position for all other characters. 
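The masking that the next few lines implement is easiest to see standalone: the operator's token is overwritten with '_' of equal length, so the spacing regexes later in this function cannot fire on it, while every other character keeps its column. A minimal sketch (mask_operators is a hypothetical name):

    import re

    def mask_operators(line):
        while True:
            m = re.match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line)
            if not m:
                return line
            line = m.group(1) + '_' * len(m.group(2)) + m.group(3)

    # 'operator<=' becomes 'operator__'; the '<=' can no longer trip
    # the missing-spaces-around-operator checks below.
    assert mask_operators('bool operator<=(const T& a);') == \
        'bool operator__(const T& a);'

The loop terminates because once the token is replaced, the trailing underscores remove the word boundary and \boperator\b no longer matches.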
- # - # The replacement is done repeatedly to avoid false positives from - # operators that call operators. - while True: - match = Match(r'^(.*\boperator\b)(\S+)(\s*\(.*)$', line) - if match: - line = match.group(1) + ('_' * len(match.group(2))) + match.group(3) - else: - break - - # We allow no-spaces around = within an if: "if ( (a=Foo()) == 0 )". - # Otherwise not. Note we only check for non-spaces on *both* sides; - # sometimes people put non-spaces on one side when aligning ='s among - # many lines (not that this is behavior that I approve of...) - if ((Search(r'[\w.]=', line) or - Search(r'=[\w.]', line)) - and not Search(r'\b(if|while|for) ', line) - # Operators taken from [lex.operators] in C++11 standard. - and not Search(r'(>=|<=|==|!=|&=|\^=|\|=|\+=|\*=|\/=|\%=)', line) - and not Search(r'operator=', line)): - error(filename, linenum, 'whitespace/operators', 4, - 'Missing spaces around =') - - # It's ok not to have spaces around binary operators like + - * /, but if - # there's too little whitespace, we get concerned. It's hard to tell, - # though, so we punt on this one for now. TODO. - - # You should always have whitespace around binary operators. - # - # Check <= and >= first to avoid false positives with < and >, then - # check non-include lines for spacing around < and >. - # - # If the operator is followed by a comma, assume it's be used in a - # macro context and don't do any checks. This avoids false - # positives. - # - # Note that && is not included here. This is because there are too - # many false positives due to RValue references. - match = Search(r'[^<>=!\s](==|!=|<=|>=|\|\|)[^<>=!\s,;\)]', line) - if match: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around %s' % match.group(1)) - elif not Match(r'#.*include', line): - # Look for < that is not surrounded by spaces. This is only - # triggered if both sides are missing spaces, even though - # technically it should flag if at least one side is missing a - # space. This is done to avoid some false positives with shifts. - match = Match(r'^(.*[^\s<])<[^\s=<,]', line) - if match: - (_, _, end_pos) = CloseExpression( - clean_lines, linenum, len(match.group(1))) - if end_pos <= -1: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around <') - - # Look for > that is not surrounded by spaces. Similar to the - # above, we only trigger if both sides are missing spaces to avoid - # false positives with shifts. - match = Match(r'^(.*[^-\s>])>[^\s=>,]', line) - if match: - (_, _, start_pos) = ReverseCloseExpression( - clean_lines, linenum, len(match.group(1))) - if start_pos <= -1: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around >') - - # We allow no-spaces around << when used like this: 10<<20, but - # not otherwise (particularly, not when used as streams) - # - # We also allow operators following an opening parenthesis, since - # those tend to be macros that deal with operators. - match = Search(r'(operator|[^\s(<])(?:L|UL|LL|ULL|l|ul|ll|ull)?<<([^\s,=<])', line) - if (match and not (match.group(1).isdigit() and match.group(2).isdigit()) and - not (match.group(1) == 'operator' and match.group(2) == ';')): - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around <<') - - # We allow no-spaces around >> for almost anything. This is because - # C++11 allows ">>" to close nested templates, which accounts for - # most cases when ">>" is not followed by a space. 
- # - # We still warn on ">>" followed by alpha character, because that is - # likely due to ">>" being used for right shifts, e.g.: - # value >> alpha - # - # When ">>" is used to close templates, the alphanumeric letter that - # follows would be part of an identifier, and there should still be - # a space separating the template type and the identifier. - # type> alpha - match = Search(r'>>[a-zA-Z_]', line) - if match: - error(filename, linenum, 'whitespace/operators', 3, - 'Missing spaces around >>') - - # There shouldn't be space around unary operators - match = Search(r'(!\s|~\s|[\s]--[\s;]|[\s]\+\+[\s;])', line) - if match: - error(filename, linenum, 'whitespace/operators', 4, - 'Extra space for operator %s' % match.group(1)) - - -def CheckParenthesisSpacing(filename, clean_lines, linenum, error): - """Checks for horizontal spacing around parentheses. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # No spaces after an if, while, switch, or for - match = Search(r' (if\(|for\(|while\(|switch\()', line) - if match: - error(filename, linenum, 'whitespace/parens', 5, - 'Missing space before ( in %s' % match.group(1)) - - # For if/for/while/switch, the left and right parens should be - # consistent about how many spaces are inside the parens, and - # there should either be zero or one spaces inside the parens. - # We don't want: "if ( foo)" or "if ( foo )". - # Exception: "for ( ; foo; bar)" and "for (foo; bar; )" are allowed. - match = Search(r'\b(if|for|while|switch)\s*' - r'\(([ ]*)(.).*[^ ]+([ ]*)\)\s*{\s*$', - line) - if match: - if len(match.group(2)) != len(match.group(4)): - if not (match.group(3) == ';' and - len(match.group(2)) == 1 + len(match.group(4)) or - not match.group(2) and Search(r'\bfor\s*\(.*; \)', line)): - error(filename, linenum, 'whitespace/parens', 5, - 'Mismatching spaces inside () in %s' % match.group(1)) - if len(match.group(2)) not in [0, 1]: - error(filename, linenum, 'whitespace/parens', 5, - 'Should have zero or one spaces inside ( and ) in %s' % - match.group(1)) - - -def CheckCommaSpacing(filename, clean_lines, linenum, error): - """Checks for horizontal spacing near commas and semicolons. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - raw = clean_lines.lines_without_raw_strings - line = clean_lines.elided[linenum] - - # You should always have a space after a comma (either as fn arg or operator) - # - # This does not apply when the non-space character following the - # comma is another comma, since the only time when that happens is - # for empty macro arguments. - # - # We run this check in two passes: first pass on elided lines to - # verify that lines contain missing whitespaces, second pass on raw - # lines to confirm that those missing whitespaces are not due to - # elided comments. 
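The two-pass idea just described, reduced to its core (missing_space_after_comma is a hypothetical name; cpplint's ReplaceAll is essentially a cached re.sub): flag only when both the elided and the raw line show a non-space character after a comma, so a comma that merely abuts an elided comment is not reported.

    import re

    def missing_space_after_comma(elided_line, raw_line):
        # Mask 'operator,(' overloads so they do not count as commas.
        masked = re.sub(r'\boperator\s*,\s*\(', 'F(', elided_line)
        return bool(re.search(r',[^,\s]', masked) and
                    re.search(r',[^,\s]', raw_line))

    assert missing_space_after_comma('f(a,b)', 'f(a,b)')        # flagged
    assert not missing_space_after_comma('f(a, b)', 'f(a, b)')  # fine
    assert not missing_space_after_comma('void operator,(T);',
                                         'void operator,(T);')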
-  if (Search(r',[^,\s]', ReplaceAll(r'\boperator\s*,\s*\(', 'F(', line)) and
-      Search(r',[^,\s]', raw[linenum])):
-    error(filename, linenum, 'whitespace/comma', 3,
-          'Missing space after ,')
-
-  # You should always have a space after a semicolon
-  # except for a few corner cases
-  # TODO(unknown): clarify if 'if (1) { return 1;}' requires one more
-  # space after ;
-  if Search(r';[^\s};\\)/]', line):
-    error(filename, linenum, 'whitespace/semicolon', 3,
-          'Missing space after ;')
-
-
-def _IsType(clean_lines, nesting_state, expr):
-  """Check if expression looks like a type name, returns true if so.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    expr: The expression to check.
-  Returns:
-    True, if token looks like a type.
-  """
-  # Keep only the last token in the expression
-  last_word = Match(r'^.*(\b\S+)$', expr)
-  if last_word:
-    token = last_word.group(1)
-  else:
-    token = expr
-
-  # Match native types and stdint types
-  if _TYPES.match(token):
-    return True
-
-  # Try a bit harder to match templated types. Walk up the nesting
-  # stack until we find something that resembles a typename
-  # declaration for what we are looking for.
-  typename_pattern = (r'\b(?:typename|class|struct)\s+' + re.escape(token) +
-                      r'\b')
-  block_index = len(nesting_state.stack) - 1
-  while block_index >= 0:
-    if isinstance(nesting_state.stack[block_index], _NamespaceInfo):
-      return False
-
-    # Found where the opening brace is. We want to scan from this
-    # line up to the beginning of the function, minus a few lines.
-    #   template <typename Type1,  // stop scanning here
-    #             ...>
-    #   class C
-    #     : public ... {  // start scanning here
-    last_line = nesting_state.stack[block_index].starting_linenum
-
-    next_block_start = 0
-    if block_index > 0:
-      next_block_start = nesting_state.stack[block_index - 1].starting_linenum
-    first_line = last_line
-    while first_line >= next_block_start:
-      if clean_lines.elided[first_line].find('template') >= 0:
-        break
-      first_line -= 1
-    if first_line < next_block_start:
-      # Didn't find any "template" keyword before reaching the next block,
-      # there are probably no template things to check for this block
-      block_index -= 1
-      continue
-
-    # Look for typename in the specified range
-    for i in xrange(first_line, last_line + 1, 1):
-      if Search(typename_pattern, clean_lines.elided[i]):
-        return True
-    block_index -= 1
-
-  return False
-
-
-def CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error):
-  """Checks for horizontal spacing near braces.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Except after an opening paren, or after another opening brace (in case of
-  # an initializer list, for instance), you should have spaces before your
-  # braces when they are delimiting blocks, classes, namespaces etc.
-  # And since you should never have braces at the beginning of a line,
-  # this is an easy test. Except that braces used for initialization don't
-  # follow the same rule; we often don't want spaces before those.
-  match = Match(r'^(.*[^ ({>]){', line)
-
-  if match:
-    # Try a bit harder to check for brace initialization.
This - # happens in one of the following forms: - # Constructor() : initializer_list_{} { ... } - # Constructor{}.MemberFunction() - # Type variable{}; - # FunctionCall(type{}, ...); - # LastArgument(..., type{}); - # LOG(INFO) << type{} << " ..."; - # map_of_type[{...}] = ...; - # ternary = expr ? new type{} : nullptr; - # OuterTemplate{}> - # - # We check for the character following the closing brace, and - # silence the warning if it's one of those listed above, i.e. - # "{.;,)<>]:". - # - # To account for nested initializer list, we allow any number of - # closing braces up to "{;,)<". We can't simply silence the - # warning on first sight of closing brace, because that would - # cause false negatives for things that are not initializer lists. - # Silence this: But not this: - # Outer{ if (...) { - # Inner{...} if (...){ // Missing space before { - # }; } - # - # There is a false negative with this approach if people inserted - # spurious semicolons, e.g. "if (cond){};", but we will catch the - # spurious semicolon with a separate check. - leading_text = match.group(1) - (endline, endlinenum, endpos) = CloseExpression( - clean_lines, linenum, len(match.group(1))) - trailing_text = '' - if endpos > -1: - trailing_text = endline[endpos:] - for offset in xrange(endlinenum + 1, - min(endlinenum + 3, clean_lines.NumLines() - 1)): - trailing_text += clean_lines.elided[offset] - # We also suppress warnings for `uint64_t{expression}` etc., as the style - # guide recommends brace initialization for integral types to avoid - # overflow/truncation. - if (not Match(r'^[\s}]*[{.;,)<>\]:]', trailing_text) - and not _IsType(clean_lines, nesting_state, leading_text)): - error(filename, linenum, 'whitespace/braces', 5, - 'Missing space before {') - - # Make sure '} else {' has spaces. - if Search(r'}else', line): - error(filename, linenum, 'whitespace/braces', 5, - 'Missing space before else') - - # You shouldn't have a space before a semicolon at the end of the line. - # There's a special case for "for" since the style guide allows space before - # the semicolon there. - if Search(r':\s*;\s*$', line): - error(filename, linenum, 'whitespace/semicolon', 5, - 'Semicolon defining empty statement. Use {} instead.') - elif Search(r'^\s*;\s*$', line): - error(filename, linenum, 'whitespace/semicolon', 5, - 'Line contains only semicolon. If this should be an empty statement, ' - 'use {} instead.') - elif (Search(r'\s+;\s*$', line) and - not Search(r'\bfor\b', line)): - error(filename, linenum, 'whitespace/semicolon', 5, - 'Extra space before last semicolon. If this should be an empty ' - 'statement, use {} instead.') - - -def IsDecltype(clean_lines, linenum, column): - """Check if the token ending on (linenum, column) is decltype(). - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: the number of the line to check. - column: end column of the token to check. - Returns: - True if this token is decltype() expression, False otherwise. - """ - (text, _, start_col) = ReverseCloseExpression(clean_lines, linenum, column) - if start_col < 0: - return False - if Search(r'\bdecltype\s*$', text[0:start_col]): - return True - return False - -def CheckSectionSpacing(filename, clean_lines, class_info, linenum, error): - """Checks for additional blank line issues related to sections. - - Currently the only thing checked here is blank line before protected/private. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. 
- class_info: A _ClassInfo objects. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - # Skip checks if the class is small, where small means 25 lines or less. - # 25 lines seems like a good cutoff since that's the usual height of - # terminals, and any class that can't fit in one screen can't really - # be considered "small". - # - # Also skip checks if we are on the first line. This accounts for - # classes that look like - # class Foo { public: ... }; - # - # If we didn't find the end of the class, last_line would be zero, - # and the check will be skipped by the first condition. - if (class_info.last_line - class_info.starting_linenum <= 24 or - linenum <= class_info.starting_linenum): - return - - matched = Match(r'\s*(public|protected|private):', clean_lines.lines[linenum]) - if matched: - # Issue warning if the line before public/protected/private was - # not a blank line, but don't do this if the previous line contains - # "class" or "struct". This can happen two ways: - # - We are at the beginning of the class. - # - We are forward-declaring an inner class that is semantically - # private, but needed to be public for implementation reasons. - # Also ignores cases where the previous line ends with a backslash as can be - # common when defining classes in C macros. - prev_line = clean_lines.lines[linenum - 1] - if (not IsBlankLine(prev_line) and - not Search(r'\b(class|struct)\b', prev_line) and - not Search(r'\\$', prev_line)): - # Try a bit harder to find the beginning of the class. This is to - # account for multi-line base-specifier lists, e.g.: - # class Derived - # : public Base { - end_class_head = class_info.starting_linenum - for i in range(class_info.starting_linenum, linenum): - if Search(r'\{\s*$', clean_lines.lines[i]): - end_class_head = i - break - if end_class_head < linenum - 1: - error(filename, linenum, 'whitespace/blank_line', 3, - '"%s:" should be preceded by a blank line' % matched.group(1)) - - -def GetPreviousNonBlankLine(clean_lines, linenum): - """Return the most recent non-blank line and its line number. - - Args: - clean_lines: A CleansedLines instance containing the file contents. - linenum: The number of the line to check. - - Returns: - A tuple with two elements. The first element is the contents of the last - non-blank line before the current line, or the empty string if this is the - first non-blank line. The second is the line number of that line, or -1 - if this is the first non-blank line. - """ - - prevlinenum = linenum - 1 - while prevlinenum >= 0: - prevline = clean_lines.elided[prevlinenum] - if not IsBlankLine(prevline): # if not a blank line... - return (prevline, prevlinenum) - prevlinenum -= 1 - return ('', -1) - - -def CheckBraces(filename, clean_lines, linenum, error): - """Looks for misplaced braces (e.g. at the end of line). - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - line = clean_lines.elided[linenum] # get rid of comments and strings - - if Match(r'\s*{\s*$', line): - # We allow an open brace to start a line in the case where someone is using - # braces in a block to explicitly create a new scope, which is commonly used - # to control the lifetime of stack-allocated variables. Braces are also - # used for brace initializers inside function calls. 
We don't detect this - # perfectly: we just don't complain if the last non-whitespace character on - # the previous non-blank line is ',', ';', ':', '(', '{', or '}', or if the - # previous line starts a preprocessor block. We also allow a brace on the - # following line if it is part of an array initialization and would not fit - # within the 80 character limit of the preceding line. - prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] - if (not Search(r'[,;:}{(]\s*$', prevline) and - not Match(r'\s*#', prevline) and - not (GetLineWidth(prevline) > _line_length - 2 and '[]' in prevline)): - error(filename, linenum, 'whitespace/braces', 4, - '{ should almost always be at the end of the previous line') - - # An else clause should be on the same line as the preceding closing brace. - if Match(r'\s*else\b\s*(?:if\b|\{|$)', line): - prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] - if Match(r'\s*}\s*$', prevline): - error(filename, linenum, 'whitespace/newline', 4, - 'An else should appear on the same line as the preceding }') - - # If braces come on one side of an else, they should be on both. - # However, we have to worry about "else if" that spans multiple lines! - if Search(r'else if\s*\(', line): # could be multi-line if - brace_on_left = bool(Search(r'}\s*else if\s*\(', line)) - # find the ( after the if - pos = line.find('else if') - pos = line.find('(', pos) - if pos > 0: - (endline, _, endpos) = CloseExpression(clean_lines, linenum, pos) - brace_on_right = endline[endpos:].find('{') != -1 - if brace_on_left != brace_on_right: # must be brace after if - error(filename, linenum, 'readability/braces', 5, - 'If an else has a brace on one side, it should have it on both') - elif Search(r'}\s*else[^{]*$', line) or Match(r'[^}]*else\s*{', line): - error(filename, linenum, 'readability/braces', 5, - 'If an else has a brace on one side, it should have it on both') - - # Likewise, an else should never have the else clause on the same line - if Search(r'\belse [^\s{]', line) and not Search(r'\belse if\b', line): - error(filename, linenum, 'whitespace/newline', 4, - 'Else clause should never be on same line as else (use 2 lines)') - - # In the same way, a do/while should never be on one line - if Match(r'\s*do [^\s{]', line): - error(filename, linenum, 'whitespace/newline', 4, - 'do/while clauses should not be on a single line') - - # Check single-line if/else bodies. The style guide says 'curly braces are not - # required for single-line statements'. We additionally allow multi-line, - # single statements, but we reject anything with more than one semicolon in - # it. This means that the first semicolon after the if should be at the end of - # its line, and the line after that should have an indent level equal to or - # lower than the if. We also check for ambiguous if/else nesting without - # braces. - if_else_match = Search(r'\b(if\s*(|constexpr)\s*\(|else\b)', line) - if if_else_match and not Match(r'\s*#', line): - if_indent = GetIndentLevel(line) - endline, endlinenum, endpos = line, linenum, if_else_match.end() - if_match = Search(r'\bif\s*(|constexpr)\s*\(', line) - if if_match: - # This could be a multiline if condition, so find the end first. - pos = if_match.end() - 1 - (endline, endlinenum, endpos) = CloseExpression(clean_lines, linenum, pos) - # Check for an opening brace, either directly after the if or on the next - # line. If found, this isn't a single-statement conditional. 
- if (not Match(r'\s*{', endline[endpos:]) - and not (Match(r'\s*$', endline[endpos:]) - and endlinenum < (len(clean_lines.elided) - 1) - and Match(r'\s*{', clean_lines.elided[endlinenum + 1]))): - while (endlinenum < len(clean_lines.elided) - and ';' not in clean_lines.elided[endlinenum][endpos:]): - endlinenum += 1 - endpos = 0 - if endlinenum < len(clean_lines.elided): - endline = clean_lines.elided[endlinenum] - # We allow a mix of whitespace and closing braces (e.g. for one-liner - # methods) and a single \ after the semicolon (for macros) - endpos = endline.find(';') - if not Match(r';[\s}]*(\\?)$', endline[endpos:]): - # Semicolon isn't the last character, there's something trailing. - # Output a warning if the semicolon is not contained inside - # a lambda expression. - if not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}]*\}\s*\)*[;,]\s*$', - endline): - error(filename, linenum, 'readability/braces', 4, - 'If/else bodies with multiple statements require braces') - elif endlinenum < len(clean_lines.elided) - 1: - # Make sure the next line is dedented - next_line = clean_lines.elided[endlinenum + 1] - next_indent = GetIndentLevel(next_line) - # With ambiguous nested if statements, this will error out on the - # if that *doesn't* match the else, regardless of whether it's the - # inner one or outer one. - if (if_match and Match(r'\s*else\b', next_line) - and next_indent != if_indent): - error(filename, linenum, 'readability/braces', 4, - 'Else clause should be indented at the same level as if. ' - 'Ambiguous nested if/else chains require braces.') - elif next_indent > if_indent: - error(filename, linenum, 'readability/braces', 4, - 'If/else bodies with multiple statements require braces') - - -def CheckTrailingSemicolon(filename, clean_lines, linenum, error): - """Looks for redundant trailing semicolon. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - line = clean_lines.elided[linenum] - - # Block bodies should not be followed by a semicolon. Due to C++11 - # brace initialization, there are more places where semicolons are - # required than not, so we explicitly list the allowed rules rather - # than listing the disallowed ones. These are the places where "};" - # should be replaced by just "}": - # 1. Some flavor of block following closing parenthesis: - # for (;;) {}; - # while (...) {}; - # switch (...) {}; - # Function(...) {}; - # if (...) {}; - # if (...) else if (...) {}; - # - # 2. else block: - # if (...) else {}; - # - # 3. const member function: - # Function(...) const {}; - # - # 4. Block following some statement: - # x = 42; - # {}; - # - # 5. Block at the beginning of a function: - # Function(...) { - # {}; - # } - # - # Note that naively checking for the preceding "{" will also match - # braces inside multi-dimensional arrays, but this is fine since - # that expression will not contain semicolons. - # - # 6. Block following another block: - # while (true) {} - # {}; - # - # 7. End of namespaces: - # namespace {}; - # - # These semicolons seems far more common than other kinds of - # redundant semicolons, possibly due to people converting classes - # to namespaces. For now we do not warn for this case. - # - # Try matching case 1 first. - match = Match(r'^(.*\)\s*)\{', line) - if match: - # Matched closing parenthesis (case 1). 
Check the token before the - # matching opening parenthesis, and don't warn if it looks like a - # macro. This avoids these false positives: - # - macro that defines a base class - # - multi-line macro that defines a base class - # - macro that defines the whole class-head - # - # But we still issue warnings for macros that we know are safe to - # warn, specifically: - # - TEST, TEST_F, TEST_P, MATCHER, MATCHER_P - # - TYPED_TEST - # - INTERFACE_DEF - # - EXCLUSIVE_LOCKS_REQUIRED, SHARED_LOCKS_REQUIRED, LOCKS_EXCLUDED: - # - # We implement a list of safe macros instead of a list of - # unsafe macros, even though the latter appears less frequently in - # google code and would have been easier to implement. This is because - # the downside for getting the allowed checks wrong means some extra - # semicolons, while the downside for getting disallowed checks wrong - # would result in compile errors. - # - # In addition to macros, we also don't want to warn on - # - Compound literals - # - Lambdas - # - alignas specifier with anonymous structs - # - decltype - closing_brace_pos = match.group(1).rfind(')') - opening_parenthesis = ReverseCloseExpression( - clean_lines, linenum, closing_brace_pos) - if opening_parenthesis[2] > -1: - line_prefix = opening_parenthesis[0][0:opening_parenthesis[2]] - macro = Search(r'\b([A-Z_][A-Z0-9_]*)\s*$', line_prefix) - func = Match(r'^(.*\])\s*$', line_prefix) - if ((macro and - macro.group(1) not in ( - 'TEST', 'TEST_F', 'MATCHER', 'MATCHER_P', 'TYPED_TEST', - 'EXCLUSIVE_LOCKS_REQUIRED', 'SHARED_LOCKS_REQUIRED', - 'LOCKS_EXCLUDED', 'INTERFACE_DEF')) or - (func and not Search(r'\boperator\s*\[\s*\]', func.group(1))) or - Search(r'\b(?:struct|union)\s+alignas\s*$', line_prefix) or - Search(r'\bdecltype$', line_prefix) or - Search(r'\s+=\s*$', line_prefix)): - match = None - if (match and - opening_parenthesis[1] > 1 and - Search(r'\]\s*$', clean_lines.elided[opening_parenthesis[1] - 1])): - # Multi-line lambda-expression - match = None - - else: - # Try matching cases 2-3. - match = Match(r'^(.*(?:else|\)\s*const)\s*)\{', line) - if not match: - # Try matching cases 4-6. These are always matched on separate lines. - # - # Note that we can't simply concatenate the previous line to the - # current line and do a single match, otherwise we may output - # duplicate warnings for the blank line case: - # if (cond) { - # // blank line - # } - prevline = GetPreviousNonBlankLine(clean_lines, linenum)[0] - if prevline and Search(r'[;{}]\s*$', prevline): - match = Match(r'^(\s*)\{', line) - - # Check matching closing brace - if match: - (endline, endlinenum, endpos) = CloseExpression( - clean_lines, linenum, len(match.group(1))) - if endpos > -1 and Match(r'^\s*;', endline[endpos:]): - # Current {} pair is eligible for semicolon check, and we have found - # the redundant semicolon, output warning here. - # - # Note: because we are scanning forward for opening braces, and - # outputting warnings for the matching closing brace, if there are - # nested blocks with trailing semicolons, we will get the error - # messages in reversed order. 
- - # We need to check the line forward for NOLINT - raw_lines = clean_lines.raw_lines - ParseNolintSuppressions(filename, raw_lines[endlinenum-1], endlinenum-1, - error) - ParseNolintSuppressions(filename, raw_lines[endlinenum], endlinenum, - error) - - error(filename, endlinenum, 'readability/braces', 4, - "You don't need a ; after a }") - - -def CheckEmptyBlockBody(filename, clean_lines, linenum, error): - """Look for empty loop/conditional body with only a single semicolon. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - # Search for loop keywords at the beginning of the line. Because only - # whitespaces are allowed before the keywords, this will also ignore most - # do-while-loops, since those lines should start with closing brace. - # - # We also check "if" blocks here, since an empty conditional block - # is likely an error. - line = clean_lines.elided[linenum] - matched = Match(r'\s*(for|while|if)\s*\(', line) - if matched: - # Find the end of the conditional expression. - (end_line, end_linenum, end_pos) = CloseExpression( - clean_lines, linenum, line.find('(')) - - # Output warning if what follows the condition expression is a semicolon. - # No warning for all other cases, including whitespace or newline, since we - # have a separate check for semicolons preceded by whitespace. - if end_pos >= 0 and Match(r';', end_line[end_pos:]): - if matched.group(1) == 'if': - error(filename, end_linenum, 'whitespace/empty_conditional_body', 5, - 'Empty conditional bodies should use {}') - else: - error(filename, end_linenum, 'whitespace/empty_loop_body', 5, - 'Empty loop bodies should use {} or continue') - - # Check for if statements that have completely empty bodies (no comments) - # and no else clauses. - if end_pos >= 0 and matched.group(1) == 'if': - # Find the position of the opening { for the if statement. - # Return without logging an error if it has no brackets. - opening_linenum = end_linenum - opening_line_fragment = end_line[end_pos:] - # Loop until EOF or find anything that's not whitespace or opening {. - while not Search(r'^\s*\{', opening_line_fragment): - if Search(r'^(?!\s*$)', opening_line_fragment): - # Conditional has no brackets. - return - opening_linenum += 1 - if opening_linenum == len(clean_lines.elided): - # Couldn't find conditional's opening { or any code before EOF. - return - opening_line_fragment = clean_lines.elided[opening_linenum] - # Set opening_line (opening_line_fragment may not be entire opening line). - opening_line = clean_lines.elided[opening_linenum] - - # Find the position of the closing }. - opening_pos = opening_line_fragment.find('{') - if opening_linenum == end_linenum: - # We need to make opening_pos relative to the start of the entire line. - opening_pos += end_pos - (closing_line, closing_linenum, closing_pos) = CloseExpression( - clean_lines, opening_linenum, opening_pos) - if closing_pos < 0: - return - - # Now construct the body of the conditional. This consists of the portion - # of the opening line after the {, all lines until the closing line, - # and the portion of the closing line before the }. - if (clean_lines.raw_lines[opening_linenum] != - CleanseComments(clean_lines.raw_lines[opening_linenum])): - # Opening line ends with a comment, so conditional isn't empty. - return - if closing_linenum > opening_linenum: - # Opening line after the {. 
Ignore comments here since we checked above. - bodylist = list(opening_line[opening_pos+1:]) - # All lines until closing line, excluding closing line, with comments. - bodylist.extend(clean_lines.raw_lines[opening_linenum+1:closing_linenum]) - # Closing line before the }. Won't (and can't) have comments. - bodylist.append(clean_lines.elided[closing_linenum][:closing_pos-1]) - body = '\n'.join(bodylist) - else: - # If statement has brackets and fits on a single line. - body = opening_line[opening_pos+1:closing_pos-1] - - # Check if the body is empty - if not _EMPTY_CONDITIONAL_BODY_PATTERN.search(body): - return - # The body is empty. Now make sure there's not an else clause. - current_linenum = closing_linenum - current_line_fragment = closing_line[closing_pos:] - # Loop until EOF or find anything that's not whitespace or else clause. - while Search(r'^\s*$|^(?=\s*else)', current_line_fragment): - if Search(r'^(?=\s*else)', current_line_fragment): - # Found an else clause, so don't log an error. - return - current_linenum += 1 - if current_linenum == len(clean_lines.elided): - break - current_line_fragment = clean_lines.elided[current_linenum] - - # The body is empty and there's no else clause until EOF or other code. - error(filename, end_linenum, 'whitespace/empty_if_body', 4, - ('If statement had no body and no else clause')) - - -def FindCheckMacro(line): - """Find a replaceable CHECK-like macro. - - Args: - line: line to search on. - Returns: - (macro name, start position), or (None, -1) if no replaceable - macro is found. - """ - for macro in _CHECK_MACROS: - i = line.find(macro) - if i >= 0: - # Find opening parenthesis. Do a regular expression match here - # to make sure that we are matching the expected CHECK macro, as - # opposed to some other macro that happens to contain the CHECK - # substring. - matched = Match(r'^(.*\b' + macro + r'\s*)\(', line) - if not matched: - continue - return (macro, len(matched.group(1))) - return (None, -1) - - -def CheckCheck(filename, clean_lines, linenum, error): - """Checks the use of CHECK and EXPECT macros. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - - # Decide the set of replacement macros that should be suggested - lines = clean_lines.elided - (check_macro, start_pos) = FindCheckMacro(lines[linenum]) - if not check_macro: - return - - # Find end of the boolean expression by matching parentheses - (last_line, end_line, end_pos) = CloseExpression( - clean_lines, linenum, start_pos) - if end_pos < 0: - return - - # If the check macro is followed by something other than a - # semicolon, assume users will log their own custom error messages - # and don't suggest any replacements. - if not Match(r'\s*;', last_line[end_pos:]): - return - - if linenum == end_line: - expression = lines[linenum][start_pos + 1:end_pos - 1] - else: - expression = lines[linenum][start_pos + 1:] - for i in xrange(linenum + 1, end_line): - expression += lines[i] - expression += last_line[0:end_pos - 1] - - # Parse expression so that we can take parentheses into account. - # This avoids false positives for inputs like "CHECK((a < 4) == b)", - # which is not replaceable by CHECK_LE. 
-  lhs = ''
-  rhs = ''
-  operator = None
-  while expression:
-    matched = Match(r'^\s*(<<|<<=|>>|>>=|->\*|->|&&|\|\||'
-                    r'==|!=|>=|>|<=|<|\()(.*)$', expression)
-    if matched:
-      token = matched.group(1)
-      if token == '(':
-        # Parenthesized operand
-        expression = matched.group(2)
-        (end, _) = FindEndOfExpressionInLine(expression, 0, ['('])
-        if end < 0:
-          return  # Unmatched parenthesis
-        lhs += '(' + expression[0:end]
-        expression = expression[end:]
-      elif token in ('&&', '||'):
-        # Logical and/or operators. This means the expression
-        # contains more than one term, for example:
-        #   CHECK(42 < a && a < b);
-        #
-        # These are not replaceable with CHECK_LE, so bail out early.
-        return
-      elif token in ('<<', '<<=', '>>', '>>=', '->*', '->'):
-        # Non-relational operator
-        lhs += token
-        expression = matched.group(2)
-      else:
-        # Relational operator
-        operator = token
-        rhs = matched.group(2)
-        break
-    else:
-      # Unparenthesized operand. Instead of appending to lhs one character
-      # at a time, we do another regular expression match to consume several
-      # characters at once if possible. Trivial benchmark shows that this
-      # is more efficient when the operands are longer than a single
-      # character, which is generally the case.
-      matched = Match(r'^([^-=!<>()&|]+)(.*)$', expression)
-      if not matched:
-        matched = Match(r'^(\s*\S)(.*)$', expression)
-        if not matched:
-          break
-      lhs += matched.group(1)
-      expression = matched.group(2)
-
-  # Only apply checks if we got all parts of the boolean expression
-  if not (lhs and operator and rhs):
-    return
-
-  # Check that rhs does not contain logical operators. We already know
-  # that lhs is fine since the loop above parses out && and ||.
-  if rhs.find('&&') > -1 or rhs.find('||') > -1:
-    return
-
-  # At least one of the operands must be a constant literal. This is
-  # to avoid suggesting replacements for unprintable things like
-  # CHECK(variable != iterator)
-  #
-  # The following pattern matches decimal, hex integers, strings, and
-  # characters (in that order).
-  lhs = lhs.strip()
-  rhs = rhs.strip()
-  match_constant = r'^([-+]?(\d+|0[xX][0-9a-fA-F]+)[lLuU]{0,3}|".*"|\'.*\')$'
-  if Match(match_constant, lhs) or Match(match_constant, rhs):
-    # Note: since we know both lhs and rhs, we can provide a more
-    # descriptive error message like:
-    #   Consider using CHECK_EQ(x, 42) instead of CHECK(x == 42)
-    # Instead of:
-    #   Consider using CHECK_EQ instead of CHECK(a == b)
-    #
-    # We are still keeping the less descriptive message because if lhs
-    # or rhs gets long, the error message might become unreadable.
-    error(filename, linenum, 'readability/check', 2,
-          'Consider using %s instead of %s(a %s b)' % (
-              _CHECK_REPLACEMENT[check_macro][operator],
-              check_macro, operator))
-
-
-def CheckAltTokens(filename, clean_lines, linenum, error):
-  """Check alternative keywords being used in boolean expressions.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Avoid preprocessor lines
-  if Match(r'^\s*#', line):
-    return
-
-  # Last ditch effort to avoid multi-line comments. This will not help
-  # if the comment started before the current line or ended after the
-  # current line, but it catches most of the false positives. At least,
-  # it provides a way to work around this warning for people who use
-  # multi-line comments in preprocessor macros.
- # - # TODO(unknown): remove this once cpplint has better support for - # multi-line comments. - if line.find('/*') >= 0 or line.find('*/') >= 0: - return - - for match in _ALT_TOKEN_REPLACEMENT_PATTERN.finditer(line): - error(filename, linenum, 'readability/alt_tokens', 2, - 'Use operator %s instead of %s' % ( - _ALT_TOKEN_REPLACEMENT[match.group(1)], match.group(1))) - - -def GetLineWidth(line): - """Determines the width of the line in column positions. - - Args: - line: A string, which may be a Unicode string. - - Returns: - The width of the line in column positions, accounting for Unicode - combining characters and wide characters. - """ - if isinstance(line, unicode): - width = 0 - for uc in unicodedata.normalize('NFC', line): - if unicodedata.east_asian_width(uc) in ('W', 'F'): - width += 2 - elif not unicodedata.combining(uc): - # Issue 337 - # https://mail.python.org/pipermail/python-list/2012-August/628809.html - if (sys.version_info.major, sys.version_info.minor) <= (3, 2): - # https://github.com/python/cpython/blob/2.7/Include/unicodeobject.h#L81 - is_wide_build = sysconfig.get_config_var("Py_UNICODE_SIZE") >= 4 - # https://github.com/python/cpython/blob/2.7/Objects/unicodeobject.c#L564 - is_low_surrogate = 0xDC00 <= ord(uc) <= 0xDFFF - if not is_wide_build and is_low_surrogate: - width -= 1 - - width += 1 - return width - else: - return len(line) - - -def CheckStyle(filename, clean_lines, linenum, file_extension, nesting_state, - error): - """Checks rules from the 'C++ style rules' section of cppguide.html. - - Most of these rules are hard to test (naming, comment style), but we - do what we can. In particular we check for 2-space indents, line lengths, - tab usage, spaces inside code, etc. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - file_extension: The extension (without the dot) of the filename. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - - # Don't use "elided" lines here, otherwise we can't check commented lines. - # Don't want to use "raw" either, because we don't want to check inside C++11 - # raw strings, - raw_lines = clean_lines.lines_without_raw_strings - line = raw_lines[linenum] - prev = raw_lines[linenum - 1] if linenum > 0 else '' - - if line.find('\t') != -1: - error(filename, linenum, 'whitespace/tab', 1, - 'Tab found; better to use spaces') - - # One or three blank spaces at the beginning of the line is weird; it's - # hard to reconcile that with 2-space indents. - # NOTE: here are the conditions rob pike used for his tests. 
Mine aren't - # as sophisticated, but it may be worth becoming so: RLENGTH==initial_spaces - # if(RLENGTH > 20) complain = 0; - # if(match($0, " +(error|private|public|protected):")) complain = 0; - # if(match(prev, "&& *$")) complain = 0; - # if(match(prev, "\\|\\| *$")) complain = 0; - # if(match(prev, "[\",=><] *$")) complain = 0; - # if(match($0, " <<")) complain = 0; - # if(match(prev, " +for \\(")) complain = 0; - # if(prevodd && match(prevprev, " +for \\(")) complain = 0; - scope_or_label_pattern = r'\s*(?:public|private|protected|signals)(?:\s+(?:slots\s*)?)?:\s*\\?$' - classinfo = nesting_state.InnermostClass() - initial_spaces = 0 - cleansed_line = clean_lines.elided[linenum] - while initial_spaces < len(line) and line[initial_spaces] == ' ': - initial_spaces += 1 - # There are certain situations we allow one space, notably for - # section labels, and also lines containing multi-line raw strings. - # We also don't check for lines that look like continuation lines - # (of lines ending in double quotes, commas, equals, or angle brackets) - # because the rules for how to indent those are non-trivial. - if (not Search(r'[",=><] *$', prev) and - (initial_spaces == 1 or initial_spaces == 3) and - not Match(scope_or_label_pattern, cleansed_line) and - not (clean_lines.raw_lines[linenum] != line and - Match(r'^\s*""', line))): - error(filename, linenum, 'whitespace/indent', 3, - 'Weird number of spaces at line-start. ' - 'Are you using a 2-space indent?') - - if line and line[-1].isspace(): - error(filename, linenum, 'whitespace/end_of_line', 4, - 'Line ends in whitespace. Consider deleting these extra spaces.') - - # Check if the line is a header guard. - is_header_guard = False - if IsHeaderExtension(file_extension): - cppvar = GetHeaderGuardCPPVariable(filename) - if (line.startswith('#ifndef %s' % cppvar) or - line.startswith('#define %s' % cppvar) or - line.startswith('#endif // %s' % cppvar)): - is_header_guard = True - # #include lines and header guards can be long, since there's no clean way to - # split them. - # - # URLs can be long too. It's possible to split these, but it makes them - # harder to cut&paste. - # - # The "$Id:...$" comment may also get very long without it being the - # developers fault. - # - # Doxygen documentation copying can get pretty long when using an overloaded - # function declaration - if (not line.startswith('#include') and not is_header_guard and - not Match(r'^\s*//.*http(s?)://\S*$', line) and - not Match(r'^\s*//\s*[^\s]*$', line) and - not Match(r'^// \$Id:.*#[0-9]+ \$$', line) and - not Match(r'^\s*/// [@\\](copydoc|copydetails|copybrief) .*$', line)): - line_width = GetLineWidth(line) - if line_width > _line_length: - error(filename, linenum, 'whitespace/line_length', 2, - 'Lines should be <= %i characters long' % _line_length) - - if (cleansed_line.count(';') > 1 and - # allow simple single line lambdas - not Match(r'^[^{};]*\[[^\[\]]*\][^{}]*\{[^{}\n\r]*\}', - line) and - # for loops are allowed two ;'s (and may run over two lines). 
- cleansed_line.find('for') == -1 and - (GetPreviousNonBlankLine(clean_lines, linenum)[0].find('for') == -1 or - GetPreviousNonBlankLine(clean_lines, linenum)[0].find(';') != -1) and - # It's ok to have many commands in a switch case that fits in 1 line - not ((cleansed_line.find('case ') != -1 or - cleansed_line.find('default:') != -1) and - cleansed_line.find('break;') != -1)): - error(filename, linenum, 'whitespace/newline', 0, - 'More than one command on the same line') - - # Some more style checks - CheckBraces(filename, clean_lines, linenum, error) - CheckTrailingSemicolon(filename, clean_lines, linenum, error) - CheckEmptyBlockBody(filename, clean_lines, linenum, error) - CheckSpacing(filename, clean_lines, linenum, nesting_state, error) - CheckOperatorSpacing(filename, clean_lines, linenum, error) - CheckParenthesisSpacing(filename, clean_lines, linenum, error) - CheckCommaSpacing(filename, clean_lines, linenum, error) - CheckBracesSpacing(filename, clean_lines, linenum, nesting_state, error) - CheckSpacingForFunctionCall(filename, clean_lines, linenum, error) - CheckCheck(filename, clean_lines, linenum, error) - CheckAltTokens(filename, clean_lines, linenum, error) - classinfo = nesting_state.InnermostClass() - if classinfo: - CheckSectionSpacing(filename, clean_lines, classinfo, linenum, error) - - -_RE_PATTERN_INCLUDE = re.compile(r'^\s*#\s*include\s*([<"])([^>"]*)[>"].*$') -# Matches the first component of a filename delimited by -s and _s. That is: -# _RE_FIRST_COMPONENT.match('foo').group(0) == 'foo' -# _RE_FIRST_COMPONENT.match('foo.cc').group(0) == 'foo' -# _RE_FIRST_COMPONENT.match('foo-bar_baz.cc').group(0) == 'foo' -# _RE_FIRST_COMPONENT.match('foo_bar-baz.cc').group(0) == 'foo' -_RE_FIRST_COMPONENT = re.compile(r'^[^-_.]+') - - -def _DropCommonSuffixes(filename): - """Drops common suffixes like _test.cc or -inl.h from filename. - - For example: - >>> _DropCommonSuffixes('foo/foo-inl.h') - 'foo/foo' - >>> _DropCommonSuffixes('foo/bar/foo.cc') - 'foo/bar/foo' - >>> _DropCommonSuffixes('foo/foo_internal.h') - 'foo/foo' - >>> _DropCommonSuffixes('foo/foo_unusualinternal.h') - 'foo/foo_unusualinternal' - - Args: - filename: The input filename. - - Returns: - The filename with the common suffix removed. - """ - for suffix in itertools.chain( - ('%s.%s' % (test_suffix.lstrip('_'), ext) - for test_suffix, ext in itertools.product(_test_suffixes, GetNonHeaderExtensions())), - ('%s.%s' % (suffix, ext) - for suffix, ext in itertools.product(['inl', 'imp', 'internal'], GetHeaderExtensions()))): - if (filename.endswith(suffix) and len(filename) > len(suffix) and - filename[-len(suffix) - 1] in ('-', '_')): - return filename[:-len(suffix) - 1] - return os.path.splitext(filename)[0] - - -def _ClassifyInclude(fileinfo, include, used_angle_brackets, include_order="default"): - """Figures out what kind of header 'include' is. - - Args: - fileinfo: The current file cpplint is running over. A FileInfo instance. - include: The path to a #included file. - used_angle_brackets: True if the #include used <> rather than "". - include_order: "default" or other value allowed in program arguments - - Returns: - One of the _XXX_HEADER constants. 
- - For example: - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'stdio.h', True) - _C_SYS_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'string', True) - _CPP_SYS_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', True, "standardcfirst") - _OTHER_SYS_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/foo.h', False) - _LIKELY_MY_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo_unknown_extension.cc'), - ... 'bar/foo_other_ext.h', False) - _POSSIBLE_MY_HEADER - >>> _ClassifyInclude(FileInfo('foo/foo.cc'), 'foo/bar.h', False) - _OTHER_HEADER - """ - # This is a list of all standard c++ header files, except - # those already checked for above. - is_cpp_header = include in _CPP_HEADERS - - # Mark include as C header if in list or in a known folder for standard-ish C headers. - is_std_c_header = (include_order == "default") or (include in _C_HEADERS - # additional linux glibc header folders - or Search(r'(?:%s)\/.*\.h' % "|".join(C_STANDARD_HEADER_FOLDERS), include)) - - # Headers with C++ extensions shouldn't be considered C system headers - include_ext = os.path.splitext(include)[1] - is_system = used_angle_brackets and not include_ext in ['.hh', '.hpp', '.hxx', '.h++'] - - if is_system: - if is_cpp_header: - return _CPP_SYS_HEADER - if is_std_c_header: - return _C_SYS_HEADER - else: - return _OTHER_SYS_HEADER - - # If the target file and the include we're checking share a - # basename when we drop common extensions, and the include - # lives in . , then it's likely to be owned by the target file. - target_dir, target_base = ( - os.path.split(_DropCommonSuffixes(fileinfo.RepositoryName()))) - include_dir, include_base = os.path.split(_DropCommonSuffixes(include)) - target_dir_pub = os.path.normpath(target_dir + '/../public') - target_dir_pub = target_dir_pub.replace('\\', '/') - if target_base == include_base and ( - include_dir == target_dir or - include_dir == target_dir_pub): - return _LIKELY_MY_HEADER - - # If the target and include share some initial basename - # component, it's possible the target is implementing the - # include, so it's allowed to be first, but we'll never - # complain if it's not there. - target_first_component = _RE_FIRST_COMPONENT.match(target_base) - include_first_component = _RE_FIRST_COMPONENT.match(include_base) - if (target_first_component and include_first_component and - target_first_component.group(0) == - include_first_component.group(0)): - return _POSSIBLE_MY_HEADER - - return _OTHER_HEADER - - - -def CheckIncludeLine(filename, clean_lines, linenum, include_state, error): - """Check rules that are applicable to #include lines. - - Strings on #include lines are NOT removed from elided line, to make - certain tasks easier. However, to prevent false positives, checks - applicable to #include lines in CheckLanguage must be put here. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - include_state: An _IncludeState instance in which the headers are inserted. - error: The function to call with any errors found. - """ - fileinfo = FileInfo(filename) - line = clean_lines.lines[linenum] - - # "include" should use the new style "foo/bar.h" instead of just "bar.h" - # Only do this check if the included header follows google naming - # conventions. If not, assume that it's a 3rd party API that - # requires special include conventions. 
-  #
-  # We also make an exception for Lua headers, which follow google
-  # naming convention but not the include convention.
-  match = Match(r'#include\s*"([^/]+\.(.*))"', line)
-  if match:
-    if (IsHeaderExtension(match.group(2)) and
-        not _THIRD_PARTY_HEADERS_PATTERN.match(match.group(1))):
-      error(filename, linenum, 'build/include_subdir', 4,
-            'Include the directory when naming header files')
-
-  # We shouldn't include a file more than once. Actually, there are a
-  # handful of instances where doing so is okay, but in general it's
-  # not.
-  match = _RE_PATTERN_INCLUDE.search(line)
-  if match:
-    include = match.group(2)
-    used_angle_brackets = (match.group(1) == '<')
-    duplicate_line = include_state.FindHeader(include)
-    if duplicate_line >= 0:
-      error(filename, linenum, 'build/include', 4,
-            '"%s" already included at %s:%s' %
-            (include, filename, duplicate_line))
-      return
-
-    for extension in GetNonHeaderExtensions():
-      if (include.endswith('.' + extension) and
-          os.path.dirname(fileinfo.RepositoryName()) != os.path.dirname(include)):
-        error(filename, linenum, 'build/include', 4,
-              'Do not include .' + extension + ' files from other packages')
-        return
-
-    # We DO want to include a 3rd party looking header if it matches the
-    # filename. Otherwise we get an erroneous error "...should include its
-    # header" error later.
-    third_src_header = False
-    for ext in GetHeaderExtensions():
-      basefilename = filename[0:len(filename) - len(fileinfo.Extension())]
-      headerfile = basefilename + '.' + ext
-      headername = FileInfo(headerfile).RepositoryName()
-      if headername in include or include in headername:
-        third_src_header = True
-        break
-
-    if third_src_header or not _THIRD_PARTY_HEADERS_PATTERN.match(include):
-      include_state.include_list[-1].append((include, linenum))
-
-      # We want to ensure that headers appear in the right order:
-      # 1) for foo.cc, foo.h  (preferred location)
-      # 2) c system files
-      # 3) cpp system files
-      # 4) for foo.cc, foo.h  (deprecated location)
-      # 5) other google headers
-      #
-      # We classify each include statement as one of those 5 types
-      # using a number of techniques. The include_state object keeps
-      # track of the highest type seen, and complains if we see a
-      # lower type after that.
-      error_message = include_state.CheckNextIncludeOrder(
-          _ClassifyInclude(fileinfo, include, used_angle_brackets, _include_order))
-      if error_message:
-        error(filename, linenum, 'build/include_order', 4,
-              '%s. Should be: %s.h, c system, c++ system, other.' %
-              (error_message, fileinfo.BaseName()))
-      canonical_include = include_state.CanonicalizeAlphabeticalOrder(include)
-      if not include_state.IsInAlphabeticalOrder(
-          clean_lines, linenum, canonical_include):
-        error(filename, linenum, 'build/include_alpha', 4,
-              'Include "%s" not in alphabetical order' % include)
-      include_state.SetLastHeader(canonical_include)
-
-
-def _GetTextInside(text, start_pattern):
-  r"""Retrieves all the text between matching open and close parentheses.
-
-  Given a string of lines and a regular expression string, retrieve all the text
-  following the expression and between opening punctuation symbols like
-  (, [, or {, and the matching close-punctuation symbol. This properly handles
-  nested occurrences of the punctuation, so for text like
-    printf(a(), b(c()));
-  a call to _GetTextInside(text, r'printf\(') will return 'a(), b(c())'.
-  start_pattern must match a string having an opening punctuation symbol at the
-  end.
-
-  Args:
-    text: The lines to extract text. Its comments and strings must be elided.
-        It can be single line and can span multiple lines.
-    start_pattern: The regexp string indicating where to start extracting
-                   the text.
-  Returns:
-    The extracted text.
-    None if either the opening string or ending punctuation could not be found.
-  """
-  # TODO(unknown): Audit cpplint.py to see what places could be profitably
-  # rewritten to use _GetTextInside (and use inferior regexp matching today).
-
-  # Maps each opening punctuation symbol to its matching closing symbol.
-  matching_punctuation = {'(': ')', '{': '}', '[': ']'}
-  closing_punctuation = set(itervalues(matching_punctuation))
-
-  # Find the position to start extracting text.
-  match = re.search(start_pattern, text, re.M)
-  if not match:  # start_pattern not found in text.
-    return None
-  start_position = match.end(0)
-
-  assert start_position > 0, (
-      'start_pattern must end with an opening punctuation.')
-  assert text[start_position - 1] in matching_punctuation, (
-      'start_pattern must end with an opening punctuation.')
-  # Stack of closing punctuations we expect to have in text after position.
-  punctuation_stack = [matching_punctuation[text[start_position - 1]]]
-  position = start_position
-  while punctuation_stack and position < len(text):
-    if text[position] == punctuation_stack[-1]:
-      punctuation_stack.pop()
-    elif text[position] in closing_punctuation:
-      # A closing punctuation without matching opening punctuations.
-      return None
-    elif text[position] in matching_punctuation:
-      punctuation_stack.append(matching_punctuation[text[position]])
-    position += 1
-  if punctuation_stack:
-    # Opening punctuations left without matching close-punctuations.
-    return None
-  # punctuations match.
-  return text[start_position:position - 1]
-
-
-# Patterns for matching call-by-reference parameters.
-#
-# Supports nested templates up to 2 levels deep using this messy pattern:
-#   < (?: < (?: < [^<>]*
-#               >
-#           | [^<>] )*
-#         >
-#       | [^<>] )*
-#   >
-_RE_PATTERN_IDENT = r'[_a-zA-Z]\w*'  # =~ [[:alpha:]][[:alnum:]]*
-_RE_PATTERN_TYPE = (
-    r'(?:const\s+)?(?:typename\s+|class\s+|struct\s+|union\s+|enum\s+)?'
-    r'(?:\w|'
-    r'\s*<(?:<(?:<[^<>]*>|[^<>])*>|[^<>])*>|'
-    r'::)+')
-# A call-by-reference parameter ends with '& identifier'.
-_RE_PATTERN_REF_PARAM = re.compile(
-    r'(' + _RE_PATTERN_TYPE + r'(?:\s*(?:\bconst\b|[*]))*\s*'
-    r'&\s*' + _RE_PATTERN_IDENT + r')\s*(?:=[^,()]+)?[,)]')
-# A call-by-const-reference parameter either ends with 'const& identifier'
-# or looks like 'const type& identifier' when 'type' is atomic.
-_RE_PATTERN_CONST_REF_PARAM = (
-    r'(?:.*\s*\bconst\s*&\s*' + _RE_PATTERN_IDENT +
-    r'|const\s+' + _RE_PATTERN_TYPE + r'\s*&\s*' + _RE_PATTERN_IDENT + r')')
-# Stream types.
-_RE_PATTERN_REF_STREAM_PARAM = (
-    r'(?:.*stream\s*&\s*' + _RE_PATTERN_IDENT + r')')
-
-
-def CheckLanguage(filename, clean_lines, linenum, file_extension,
-                  include_state, nesting_state, error):
-  """Checks rules from the 'C++ language rules' section of cppguide.html.
-
-  Some of these rules are hard to test (function overloading, using
-  uint32 inappropriately), but we do the best we can.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    file_extension: The extension (without the dot) of the filename.
-    include_state: An _IncludeState instance in which the headers are inserted.
-    nesting_state: A NestingState instance which maintains information about
-                   the current stack of nested blocks being parsed.
- error: The function to call with any errors found. - """ - # If the line is empty or consists of entirely a comment, no need to - # check it. - line = clean_lines.elided[linenum] - if not line: - return - - match = _RE_PATTERN_INCLUDE.search(line) - if match: - CheckIncludeLine(filename, clean_lines, linenum, include_state, error) - return - - # Reset include state across preprocessor directives. This is meant - # to silence warnings for conditional includes. - match = Match(r'^\s*#\s*(if|ifdef|ifndef|elif|else|endif)\b', line) - if match: - include_state.ResetSection(match.group(1)) - - - # Perform other checks now that we are sure that this is not an include line - CheckCasts(filename, clean_lines, linenum, error) - CheckGlobalStatic(filename, clean_lines, linenum, error) - CheckPrintf(filename, clean_lines, linenum, error) - - if IsHeaderExtension(file_extension): - # TODO(unknown): check that 1-arg constructors are explicit. - # How to tell it's a constructor? - # (handled in CheckForNonStandardConstructs for now) - # TODO(unknown): check that classes declare or disable copy/assign - # (level 1 error) - pass - - # Check if people are using the verboten C basic types. The only exception - # we regularly allow is "unsigned short port" for port. - if Search(r'\bshort port\b', line): - if not Search(r'\bunsigned short port\b', line): - error(filename, linenum, 'runtime/int', 4, - 'Use "unsigned short" for ports, not "short"') - else: - match = Search(r'\b(short|long(?! +double)|long long)\b', line) - if match: - error(filename, linenum, 'runtime/int', 4, - 'Use int16/int64/etc, rather than the C type %s' % match.group(1)) - - # Check if some verboten operator overloading is going on - # TODO(unknown): catch out-of-line unary operator&: - # class X {}; - # int operator&(const X& x) { return 42; } // unary operator& - # The trick is it's hard to tell apart from binary operator&: - # class Y { int operator&(const Y& x) { return 23; } }; // binary operator& - if Search(r'\boperator\s*&\s*\(\s*\)', line): - error(filename, linenum, 'runtime/operator', 4, - 'Unary operator& is dangerous. Do not use it.') - - # Check for suspicious usage of "if" like - # } if (a == b) { - if Search(r'\}\s*if\s*\(', line): - error(filename, linenum, 'readability/braces', 4, - 'Did you mean "else if"? If not, start a new line for "if".') - - # Check for potential format string bugs like printf(foo). - # We constrain the pattern not to pick things like DocidForPrintf(foo). - # Not perfect but it can catch printf(foo.c_str()) and printf(foo->c_str()) - # TODO(unknown): Catch the following case. Need to change the calling - # convention of the whole function to process multiple line to handle it. - # printf( - # boy_this_is_a_really_long_variable_that_cannot_fit_on_the_prev_line); - printf_args = _GetTextInside(line, r'(?i)\b(string)?printf\s*\(') - if printf_args: - match = Match(r'([\w.\->()]+)$', printf_args) - if match and match.group(1) != '__VA_ARGS__': - function_name = re.search(r'\b((?:string)?printf)\s*\(', - line, re.I).group(1) - error(filename, linenum, 'runtime/printf', 4, - 'Potential format string bug. Do %s("%%s", %s) instead.' - % (function_name, match.group(1))) - - # Check for potential memset bugs like memset(buf, sizeof(buf), 0). - match = Search(r'memset\s*\(([^,]*),\s*([^,]*),\s*0\s*\)', line) - if match and not Match(r"^''|-?[0-9]+|0x[0-9A-Fa-f]$", match.group(2)): - error(filename, linenum, 'runtime/memset', 4, - 'Did you mean "memset(%s, 0, %s)"?' 
- % (match.group(1), match.group(2))) - - if Search(r'\busing namespace\b', line): - if Search(r'\bliterals\b', line): - error(filename, linenum, 'build/namespaces_literals', 5, - 'Do not use namespace using-directives. ' - 'Use using-declarations instead.') - else: - error(filename, linenum, 'build/namespaces', 5, - 'Do not use namespace using-directives. ' - 'Use using-declarations instead.') - - # Detect variable-length arrays. - match = Match(r'\s*(.+::)?(\w+) [a-z]\w*\[(.+)];', line) - if (match and match.group(2) != 'return' and match.group(2) != 'delete' and - match.group(3).find(']') == -1): - # Split the size using space and arithmetic operators as delimiters. - # If any of the resulting tokens are not compile time constants then - # report the error. - tokens = re.split(r'\s|\+|\-|\*|\/|<<|>>]', match.group(3)) - is_const = True - skip_next = False - for tok in tokens: - if skip_next: - skip_next = False - continue - - if Search(r'sizeof\(.+\)', tok): continue - if Search(r'arraysize\(\w+\)', tok): continue - - tok = tok.lstrip('(') - tok = tok.rstrip(')') - if not tok: continue - if Match(r'\d+', tok): continue - if Match(r'0[xX][0-9a-fA-F]+', tok): continue - if Match(r'k[A-Z0-9]\w*', tok): continue - if Match(r'(.+::)?k[A-Z0-9]\w*', tok): continue - if Match(r'(.+::)?[A-Z][A-Z0-9_]*', tok): continue - # A catch all for tricky sizeof cases, including 'sizeof expression', - # 'sizeof(*type)', 'sizeof(const type)', 'sizeof(struct StructName)' - # requires skipping the next token because we split on ' ' and '*'. - if tok.startswith('sizeof'): - skip_next = True - continue - is_const = False - break - if not is_const: - error(filename, linenum, 'runtime/arrays', 1, - 'Do not use variable-length arrays. Use an appropriately named ' - "('k' followed by CamelCase) compile-time constant for the size.") - - # Check for use of unnamed namespaces in header files. Registration - # macros are typically OK, so we allow use of "namespace {" on lines - # that end with backslashes. - if (IsHeaderExtension(file_extension) - and Search(r'\bnamespace\s*{', line) - and line[-1] != '\\'): - error(filename, linenum, 'build/namespaces_headers', 4, - 'Do not use unnamed namespaces in header files. See ' - 'https://google-styleguide.googlecode.com/svn/trunk/cppguide.xml#Namespaces' - ' for more information.') - - -def CheckGlobalStatic(filename, clean_lines, linenum, error): - """Check for unsafe global or static objects. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # Match two lines at a time to support multiline declarations - if linenum + 1 < clean_lines.NumLines() and not Search(r'[;({]', line): - line += clean_lines.elided[linenum + 1].strip() - - # Check for people declaring static/global STL strings at the top level. - # This is dangerous because the C++ language does not guarantee that - # globals with constructors are initialized before the first access, and - # also because globals can be destroyed when some threads are still running. - # TODO(unknown): Generalize this to also find static unique_ptr instances. - # TODO(unknown): File bugs for clang-tidy to find these. - match = Match( - r'((?:|static +)(?:|const +))(?::*std::)?string( +const)? +' - r'([a-zA-Z0-9_:]+)\b(.*)', - line) - - # Remove false positives: - # - String pointers (as opposed to values). 
- # string *pointer - # const string *pointer - # string const *pointer - # string *const pointer - # - # - Functions and template specializations. - # string Function(... - # string Class::Method(... - # - # - Operators. These are matched separately because operator names - # cross non-word boundaries, and trying to match both operators - # and functions at the same time would decrease accuracy of - # matching identifiers. - # string Class::operator*() - if (match and - not Search(r'\bstring\b(\s+const)?\s*[\*\&]\s*(const\s+)?\w', line) and - not Search(r'\boperator\W', line) and - not Match(r'\s*(<.*>)?(::[a-zA-Z0-9_]+)*\s*\(([^"]|$)', match.group(4))): - if Search(r'\bconst\b', line): - error(filename, linenum, 'runtime/string', 4, - 'For a static/global string constant, use a C style string ' - 'instead: "%schar%s %s[]".' % - (match.group(1), match.group(2) or '', match.group(3))) - else: - error(filename, linenum, 'runtime/string', 4, - 'Static/global string variables are not permitted.') - - if (Search(r'\b([A-Za-z0-9_]*_)\(\1\)', line) or - Search(r'\b([A-Za-z0-9_]*_)\(CHECK_NOTNULL\(\1\)\)', line)): - error(filename, linenum, 'runtime/init', 4, - 'You seem to be initializing a member variable with itself.') - - -def CheckPrintf(filename, clean_lines, linenum, error): - """Check for printf related issues. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - # When snprintf is used, the second argument shouldn't be a literal. - match = Search(r'snprintf\s*\(([^,]*),\s*([0-9]*)\s*,', line) - if match and match.group(2) != '0': - # If 2nd arg is zero, snprintf is used to calculate size. - error(filename, linenum, 'runtime/printf', 3, - 'If you can, use sizeof(%s) instead of %s as the 2nd arg ' - 'to snprintf.' % (match.group(1), match.group(2))) - - # Check if some verboten C functions are being used. - if Search(r'\bsprintf\s*\(', line): - error(filename, linenum, 'runtime/printf', 5, - 'Never use sprintf. Use snprintf instead.') - match = Search(r'\b(strcpy|strcat)\s*\(', line) - if match: - error(filename, linenum, 'runtime/printf', 4, - 'Almost always, snprintf is better than %s' % match.group(1)) - - -def IsDerivedFunction(clean_lines, linenum): - """Check if current line contains an inherited function. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line contains a function with "override" - virt-specifier. - """ - # Scan back a few lines for start of current function - for i in xrange(linenum, max(-1, linenum - 10), -1): - match = Match(r'^([^()]*\w+)\(', clean_lines.elided[i]) - if match: - # Look for "override" after the matching closing parenthesis - line, _, closing_paren = CloseExpression( - clean_lines, i, len(match.group(1))) - return (closing_paren >= 0 and - Search(r'\boverride\b', line[closing_paren:])) - return False - - -def IsOutOfLineMethodDefinition(clean_lines, linenum): - """Check if current line contains an out-of-line method definition. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line contains an out-of-line method definition. 
- """ - # Scan back a few lines for start of current function - for i in xrange(linenum, max(-1, linenum - 10), -1): - if Match(r'^([^()]*\w+)\(', clean_lines.elided[i]): - return Match(r'^[^()]*\w+::\w+\(', clean_lines.elided[i]) is not None - return False - - -def IsInitializerList(clean_lines, linenum): - """Check if current line is inside constructor initializer list. - - Args: - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - Returns: - True if current line appears to be inside constructor initializer - list, False otherwise. - """ - for i in xrange(linenum, 1, -1): - line = clean_lines.elided[i] - if i == linenum: - remove_function_body = Match(r'^(.*)\{\s*$', line) - if remove_function_body: - line = remove_function_body.group(1) - - if Search(r'\s:\s*\w+[({]', line): - # A lone colon tend to indicate the start of a constructor - # initializer list. It could also be a ternary operator, which - # also tend to appear in constructor initializer lists as - # opposed to parameter lists. - return True - if Search(r'\}\s*,\s*$', line): - # A closing brace followed by a comma is probably the end of a - # brace-initialized member in constructor initializer list. - return True - if Search(r'[{};]\s*$', line): - # Found one of the following: - # - A closing brace or semicolon, probably the end of the previous - # function. - # - An opening brace, probably the start of current class or namespace. - # - # Current line is probably not inside an initializer list since - # we saw one of those things without seeing the starting colon. - return False - - # Got to the beginning of the file without seeing the start of - # constructor initializer list. - return False - - -def CheckForNonConstReference(filename, clean_lines, linenum, - nesting_state, error): - """Check for non-const references. - - Separate from CheckLanguage since it scans backwards from current - line, instead of scanning forward. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: The function to call with any errors found. - """ - # Do nothing if there is no '&' on current line. - line = clean_lines.elided[linenum] - if '&' not in line: - return - - # If a function is inherited, current function doesn't have much of - # a choice, so any non-const references should not be blamed on - # derived function. - if IsDerivedFunction(clean_lines, linenum): - return - - # Don't warn on out-of-line method definitions, as we would warn on the - # in-line declaration, if it isn't marked with 'override'. - if IsOutOfLineMethodDefinition(clean_lines, linenum): - return - - # Long type names may be broken across multiple lines, usually in one - # of these forms: - # LongType - # ::LongTypeContinued &identifier - # LongType:: - # LongTypeContinued &identifier - # LongType< - # ...>::LongTypeContinued &identifier - # - # If we detected a type split across two lines, join the previous - # line to current line so that we can match const references - # accordingly. - # - # Note that this only scans back one line, since scanning back - # arbitrary number of lines would be expensive. If you have a type - # that spans more than 2 lines, please use a typedef. 
- if linenum > 1: - previous = None - if Match(r'\s*::(?:[\w<>]|::)+\s*&\s*\S', line): - # previous_line\n + ::current_line - previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+[\w<>])\s*$', - clean_lines.elided[linenum - 1]) - elif Match(r'\s*[a-zA-Z_]([\w<>]|::)+\s*&\s*\S', line): - # previous_line::\n + current_line - previous = Search(r'\b((?:const\s*)?(?:[\w<>]|::)+::)\s*$', - clean_lines.elided[linenum - 1]) - if previous: - line = previous.group(1) + line.lstrip() - else: - # Check for templated parameter that is split across multiple lines - endpos = line.rfind('>') - if endpos > -1: - (_, startline, startpos) = ReverseCloseExpression( - clean_lines, linenum, endpos) - if startpos > -1 and startline < linenum: - # Found the matching < on an earlier line, collect all - # pieces up to current line. - line = '' - for i in xrange(startline, linenum + 1): - line += clean_lines.elided[i].strip() - - # Check for non-const references in function parameters. A single '&' may - # found in the following places: - # inside expression: binary & for bitwise AND - # inside expression: unary & for taking the address of something - # inside declarators: reference parameter - # We will exclude the first two cases by checking that we are not inside a - # function body, including one that was just introduced by a trailing '{'. - # TODO(unknown): Doesn't account for 'catch(Exception& e)' [rare]. - if (nesting_state.previous_stack_top and - not (isinstance(nesting_state.previous_stack_top, _ClassInfo) or - isinstance(nesting_state.previous_stack_top, _NamespaceInfo))): - # Not at toplevel, not within a class, and not within a namespace - return - - # Avoid initializer lists. We only need to scan back from the - # current line for something that starts with ':'. - # - # We don't need to check the current line, since the '&' would - # appear inside the second set of parentheses on the current line as - # opposed to the first set. - if linenum > 0: - for i in xrange(linenum - 1, max(0, linenum - 10), -1): - previous_line = clean_lines.elided[i] - if not Search(r'[),]\s*$', previous_line): - break - if Match(r'^\s*:\s+\S', previous_line): - return - - # Avoid preprocessors - if Search(r'\\\s*$', line): - return - - # Avoid constructor initializer lists - if IsInitializerList(clean_lines, linenum): - return - - # We allow non-const references in a few standard places, like functions - # called "swap()" or iostream operators like "<<" or ">>". Do not check - # those function parameters. - # - # We also accept & in static_assert, which looks like a function but - # it's actually a declaration expression. - allowed_functions = (r'(?:[sS]wap(?:<\w:+>)?|' - r'operator\s*[<>][<>]|' - r'static_assert|COMPILE_ASSERT' - r')\s*\(') - if Search(allowed_functions, line): - return - elif not Search(r'\S+\([^)]*$', line): - # Don't see an allowed function on this line. Actually we - # didn't see any function name on this line, so this is likely a - # multi-line parameter list. Try a bit harder to catch this case. - for i in xrange(2): - if (linenum > i and - Search(allowed_functions, clean_lines.elided[linenum - i - 1])): - return - - decls = ReplaceAll(r'{[^}]*}', ' ', line) # exclude function body - for parameter in re.findall(_RE_PATTERN_REF_PARAM, decls): - if (not Match(_RE_PATTERN_CONST_REF_PARAM, parameter) and - not Match(_RE_PATTERN_REF_STREAM_PARAM, parameter)): - error(filename, linenum, 'runtime/references', 2, - 'Is this a non-const reference? 
'
-                'If so, make const or use a pointer: ' +
-                ReplaceAll(' *<', '<', parameter))
-
-
-def CheckCasts(filename, clean_lines, linenum, error):
-  """Various cast related checks.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    error: The function to call with any errors found.
-  """
-  line = clean_lines.elided[linenum]
-
-  # Check to see if they're using an conversion function cast.
-  # I just try to capture the most common basic types, though there are more.
-  # Parameterless conversion functions, such as bool(), are allowed as they are
-  # probably a member operator declaration or default constructor.
-  match = Search(
-      r'(\bnew\s+(?:const\s+)?|\S<\s*(?:const\s+)?)?\b'
-      r'(int|float|double|bool|char|int32|uint32|int64|uint64)'
-      r'(\([^)].*)', line)
-  expecting_function = ExpectingFunctionArgs(clean_lines, linenum)
-  if match and not expecting_function:
-    matched_type = match.group(2)
-
-    # matched_new_or_template is used to silence two false positives:
-    # - New operators
-    # - Template arguments with function types
-    #
-    # For template arguments, we match on types immediately following
-    # an opening bracket without any spaces. This is a fast way to
-    # silence the common case where the function type is the first
-    # template argument. False negative with less-than comparison is
-    # avoided because those operators are usually followed by a space.
-    #
-    #   function<double(double)> // bracket + no space = false positive
-    #   value < double(42)       // bracket + space = true positive
-    matched_new_or_template = match.group(1)
-
-    # Avoid arrays by looking for brackets that come after the closing
-    # parenthesis.
-    if Match(r'\([^()]+\)\s*\[', match.group(3)):
-      return
-
-    # Other things to ignore:
-    # - Function pointers
-    # - Casts to pointer types
-    # - Placement new
-    # - Alias declarations
-    matched_funcptr = match.group(3)
-    if (matched_new_or_template is None and
-        not (matched_funcptr and
-             (Match(r'\((?:[^() ]+::\s*\*\s*)?[^() ]+\)\s*\(',
-                    matched_funcptr) or
-              matched_funcptr.startswith('(*)'))) and
-        not Match(r'\s*using\s+\S+\s*=\s*' + matched_type, line) and
-        not Search(r'new\(\S+\)\s*' + matched_type, line)):
-      error(filename, linenum, 'readability/casting', 4,
-            'Using deprecated casting style. '
-            'Use static_cast<%s>(...) instead' %
-            matched_type)
-
-  if not expecting_function:
-    CheckCStyleCast(filename, clean_lines, linenum, 'static_cast',
-                    r'\((int|float|double|bool|char|u?int(16|32|64)|size_t)\)', error)
-
-  # This doesn't catch all cases. Consider (const char * const)"hello".
-  #
-  # (char *) "foo" should always be a const_cast (reinterpret_cast won't
-  # compile).
-  if CheckCStyleCast(filename, clean_lines, linenum, 'const_cast',
-                     r'\((char\s?\*+\s?)\)\s*"', error):
-    pass
-  else:
-    # Check pointer casts for other than string constants
-    CheckCStyleCast(filename, clean_lines, linenum, 'reinterpret_cast',
-                    r'\((\w+\s?\*+\s?)\)', error)
-
-  # In addition, we look for people taking the address of a cast. This
-  # is dangerous -- casts can assign to temporaries, so the pointer doesn't
-  # point where you think.
-  #
-  # Some non-identifier character is required before the '&' for the
-  # expression to be recognized as a cast. These are casts:
-  #   expression = &static_cast<int&>(temporary());
-  #   function(&(int*)(temporary()));
-  #
-  # This is not a cast:
-  #   reference_type&(int* function_param);
-  match = Search(
-      r'(?:[^\w]&\(([^)*][^)]*)\)[\w(])|'
-      r'(?:[^\w]&(static|dynamic|down|reinterpret)_cast\b)', line)
-  if match:
-    # Try a better error message when the & is bound to something
-    # dereferenced by the casted pointer, as opposed to the casted
-    # pointer itself.
-    parenthesis_error = False
-    match = Match(r'^(.*&(?:static|dynamic|down|reinterpret)_cast\b)<', line)
-    if match:
-      _, y1, x1 = CloseExpression(clean_lines, linenum, len(match.group(1)))
-      if x1 >= 0 and clean_lines.elided[y1][x1] == '(':
-        _, y2, x2 = CloseExpression(clean_lines, y1, x1)
-        if x2 >= 0:
-          extended_line = clean_lines.elided[y2][x2:]
-          if y2 < clean_lines.NumLines() - 1:
-            extended_line += clean_lines.elided[y2 + 1]
-          if Match(r'\s*(?:->|\[)', extended_line):
-            parenthesis_error = True
-
-    if parenthesis_error:
-      error(filename, linenum, 'readability/casting', 4,
-            ('Are you taking an address of something dereferenced '
-             'from a cast? Wrapping the dereferenced expression in '
-             'parentheses will make the binding more obvious'))
-    else:
-      error(filename, linenum, 'runtime/casting', 4,
-            ('Are you taking an address of a cast? '
-             'This is dangerous: could be a temp var. '
-             'Take the address before doing the cast, rather than after'))
-
-
-def CheckCStyleCast(filename, clean_lines, linenum, cast_type, pattern, error):
-  """Checks for a C-style cast by looking for the pattern.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-    cast_type: The string for the C++ cast to recommend. This is either
-      reinterpret_cast, static_cast, or const_cast, depending.
-    pattern: The regular expression used to find C-style casts.
-    error: The function to call with any errors found.
-
-  Returns:
-    True if an error was emitted.
-    False otherwise.
-  """
-  line = clean_lines.elided[linenum]
-  match = Search(pattern, line)
-  if not match:
-    return False
-
-  # Exclude lines with keywords that tend to look like casts
-  context = line[0:match.start(1) - 1]
-  if Match(r'.*\b(?:sizeof|alignof|alignas|[_A-Z][_A-Z0-9]*)\s*$', context):
-    return False
-
-  # Try expanding current context to see if we one level of
-  # parentheses inside a macro.
-  if linenum > 0:
-    for i in xrange(linenum - 1, max(0, linenum - 5), -1):
-      context = clean_lines.elided[i] + context
-      if Match(r'.*\b[_A-Z][_A-Z0-9]*\s*\((?:\([^()]*\)|[^()])*$', context):
-        return False
-
-  # operator++(int) and operator--(int)
-  if (context.endswith(' operator++') or context.endswith(' operator--') or
-      context.endswith('::operator++') or context.endswith('::operator--')):
-    return False
-
-  # A single unnamed argument for a function tends to look like old style cast.
-  # If we see those, don't issue warnings for deprecated casts.
-  remainder = line[match.end(0):]
-  if Match(r'^\s*(?:;|const\b|throw\b|final\b|override\b|[=>{),]|->)',
-           remainder):
-    return False
-
-  # At this point, all that should be left is actual casts.
-  error(filename, linenum, 'readability/casting', 4,
-        'Using C-style cast. Use %s<%s>(...) instead' %
-        (cast_type, match.group(1)))
-
-  return True
-
-
-def ExpectingFunctionArgs(clean_lines, linenum):
-  """Checks whether where function type arguments are expected.
-
-  Args:
-    clean_lines: A CleansedLines instance containing the file.
-    linenum: The number of the line to check.
-
-  Returns:
-    True if the line at 'linenum' is inside something that expects arguments
-    of function types.
-  """
-  line = clean_lines.elided[linenum]
-  return (Match(r'^\s*MOCK_(CONST_)?METHOD\d+(_T)?\(', line) or
-          (linenum >= 2 and
-           (Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\((?:\S+,)?\s*$',
-                  clean_lines.elided[linenum - 1]) or
-            Match(r'^\s*MOCK_(?:CONST_)?METHOD\d+(?:_T)?\(\s*$',
-                  clean_lines.elided[linenum - 2]) or
-            Search(r'\bstd::m?function\s*\<\s*$',
-                   clean_lines.elided[linenum - 1]))))
-
-
-_HEADERS_CONTAINING_TEMPLATES = (
-    ('<deque>', ('deque',)),
-    ('<functional>', ('unary_function', 'binary_function',
-                      'plus', 'minus', 'multiplies', 'divides', 'modulus',
-                      'negate',
-                      'equal_to', 'not_equal_to', 'greater', 'less',
-                      'greater_equal', 'less_equal',
-                      'logical_and', 'logical_or', 'logical_not',
-                      'unary_negate', 'not1', 'binary_negate', 'not2',
-                      'bind1st', 'bind2nd',
-                      'pointer_to_unary_function',
-                      'pointer_to_binary_function',
-                      'ptr_fun',
-                      'mem_fun_t', 'mem_fun', 'mem_fun1_t', 'mem_fun1_ref_t',
-                      'mem_fun_ref_t',
-                      'const_mem_fun_t', 'const_mem_fun1_t',
-                      'const_mem_fun_ref_t', 'const_mem_fun1_ref_t',
-                      'mem_fun_ref',
-                      )),
-    ('<limits>', ('numeric_limits',)),
-    ('<list>', ('list',)),
-    ('<map>', ('multimap',)),
-    ('<memory>', ('allocator', 'make_shared', 'make_unique', 'shared_ptr',
-                  'unique_ptr', 'weak_ptr')),
-    ('<queue>', ('queue', 'priority_queue',)),
-    ('<set>', ('multiset',)),
-    ('<stack>', ('stack',)),
-    ('<string>', ('char_traits', 'basic_string',)),
-    ('<tuple>', ('tuple',)),
-    ('<unordered_map>', ('unordered_map', 'unordered_multimap')),
-    ('<unordered_set>', ('unordered_set', 'unordered_multiset')),
-    ('<utility>', ('pair',)),
-    ('<vector>', ('vector',)),
-
-    # gcc extensions.
-    # Note: std::hash is their hash, ::hash is our hash
-    ('<hash_map>', ('hash_map', 'hash_multimap',)),
-    ('<hash_set>', ('hash_set', 'hash_multiset',)),
-    ('<slist>', ('slist',)),
-    )
-
-_HEADERS_MAYBE_TEMPLATES = (
-    ('<algorithm>', ('copy', 'max', 'min', 'min_element', 'sort',
-                     'transform',
-                     )),
-    ('<utility>', ('forward', 'make_pair', 'move', 'swap')),
-    )
-
-_RE_PATTERN_STRING = re.compile(r'\bstring\b')
-
-_re_pattern_headers_maybe_templates = []
-for _header, _templates in _HEADERS_MAYBE_TEMPLATES:
-  for _template in _templates:
-    # Match max<type>(..., ...), max(..., ...), but not foo->max, foo.max or
-    # 'type::max()'.
-    _re_pattern_headers_maybe_templates.append(
-        (re.compile(r'[^>.]\b' + _template + r'(<.*?>)?\([^\)]'),
-         _template,
-         _header))
-# Match set<type>, but not foo->set<type>, foo.set<type>
-_re_pattern_headers_maybe_templates.append(
-    (re.compile(r'[^>.]\bset\s*\<'),
-     'set<>',
-     '<set>'))
-# Match 'map<type> var' and 'std::map<type>(...)', but not 'map<type>(...)''
-_re_pattern_headers_maybe_templates.append(
-    (re.compile(r'(std\b::\bmap\s*\<)|(^(std\b::\b)map\b\(\s*\<)'),
-     'map<>',
-     '<map>'))
-
-# Other scripts may reach in and modify this pattern.
-_re_pattern_templates = []
-for _header, _templates in _HEADERS_CONTAINING_TEMPLATES:
-  for _template in _templates:
-    _re_pattern_templates.append(
-        (re.compile(r'(\<|\b)' + _template + r'\s*\<'),
-         _template + '<>',
-         _header))
-
-
-def FilesBelongToSameModule(filename_cc, filename_h):
-  """Check if these two filenames belong to the same module.
-
-  The concept of a 'module' here is a as follows:
-  foo.h, foo-inl.h, foo.cc, foo_test.cc and foo_unittest.cc belong to the
-  same 'module' if they are in the same directory.
-  some/path/public/xyzzy and some/path/internal/xyzzy are also considered
-  to belong to the same module here.
-
-  If the filename_cc contains a longer path than the filename_h, for example,
-  '/absolute/path/to/base/sysinfo.cc', and this file would include
-  'base/sysinfo.h', this function also produces the prefix needed to open the
-  header. This is used by the caller of this function to more robustly open the
-  header file. We don't have access to the real include paths in this context,
-  so we need this guesswork here.
-
-  Known bugs: tools/base/bar.cc and base/bar.h belong to the same module
-  according to this implementation. Because of this, this function gives
-  some false positives. This should be sufficiently rare in practice.
-
-  Args:
-    filename_cc: is the path for the source (e.g. .cc) file
-    filename_h: is the path for the header path
-
-  Returns:
-    Tuple with a bool and a string:
-    bool: True if filename_cc and filename_h belong to the same module.
-    string: the additional prefix needed to open the header file.
-  """
-  fileinfo_cc = FileInfo(filename_cc)
-  if not fileinfo_cc.Extension().lstrip('.') in GetNonHeaderExtensions():
-    return (False, '')
-
-  fileinfo_h = FileInfo(filename_h)
-  if not IsHeaderExtension(fileinfo_h.Extension().lstrip('.')):
-    return (False, '')
-
-  filename_cc = filename_cc[:-(len(fileinfo_cc.Extension()))]
-  matched_test_suffix = Search(_TEST_FILE_SUFFIX, fileinfo_cc.BaseName())
-  if matched_test_suffix:
-    filename_cc = filename_cc[:-len(matched_test_suffix.group(1))]
-
-  filename_cc = filename_cc.replace('/public/', '/')
-  filename_cc = filename_cc.replace('/internal/', '/')
-
-  filename_h = filename_h[:-(len(fileinfo_h.Extension()))]
-  if filename_h.endswith('-inl'):
-    filename_h = filename_h[:-len('-inl')]
-  filename_h = filename_h.replace('/public/', '/')
-  filename_h = filename_h.replace('/internal/', '/')
-
-  files_belong_to_same_module = filename_cc.endswith(filename_h)
-  common_path = ''
-  if files_belong_to_same_module:
-    common_path = filename_cc[:-len(filename_h)]
-  return files_belong_to_same_module, common_path
-
-
-def UpdateIncludeState(filename, include_dict, io=codecs):
-  """Fill up the include_dict with new includes found from the file.
-
-  Args:
-    filename: the name of the header to read.
-    include_dict: a dictionary in which the headers are inserted.
-    io: The io factory to use to read the file. Provided for testability.
-
-  Returns:
-    True if a header was successfully added. False otherwise.
-  """
-  headerfile = None
-  try:
-    with io.open(filename, 'r', 'utf8', 'replace') as headerfile:
-      linenum = 0
-      for line in headerfile:
-        linenum += 1
-        clean_line = CleanseComments(line)
-        match = _RE_PATTERN_INCLUDE.search(clean_line)
-        if match:
-          include = match.group(2)
-          include_dict.setdefault(include, linenum)
-    return True
-  except IOError:
-    return False
-
-
-
-def CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error,
-                              io=codecs):
-  """Reports for missing stl includes.
-
-  This function will output warnings to make sure you are including the headers
-  necessary for the stl containers and functions that you use. We only give one
-  reason to include a header. For example, if you use both equal_to<> and
-  less<> in a .h file, only one (the latter in the file) of these will be
-  reported as a reason to include the <functional>.
-
-  Args:
-    filename: The name of the current file.
-    clean_lines: A CleansedLines instance containing the file.
-    include_state: An _IncludeState instance.
-    error: The function to call with any errors found.
-    io: The IO factory to use to read the header file. Provided for unittest
-        injection.
-  """
-  required = {}  # A map of header name to linenumber and the template entity.
-                 # Example of required: { '<functional>': (1219, 'less<>') }
-
-  for linenum in xrange(clean_lines.NumLines()):
-    line = clean_lines.elided[linenum]
-    if not line or line[0] == '#':
-      continue
-
-    # String is special -- it is a non-templatized type in STL.
-    matched = _RE_PATTERN_STRING.search(line)
-    if matched:
-      # Don't warn about strings in non-STL namespaces:
-      # (We check only the first match per line; good enough.)
-      prefix = line[:matched.start()]
-      if prefix.endswith('std::') or not prefix.endswith('::'):
-        required['<string>'] = (linenum, 'string')
-
-    for pattern, template, header in _re_pattern_headers_maybe_templates:
-      if pattern.search(line):
-        required[header] = (linenum, template)
-
-    # The following function is just a speed up, no semantics are changed.
-    if not '<' in line:  # Reduces the cpu time usage by skipping lines.
-      continue
-
-    for pattern, template, header in _re_pattern_templates:
-      matched = pattern.search(line)
-      if matched:
-        # Don't warn about IWYU in non-STL namespaces:
-        # (We check only the first match per line; good enough.)
-        prefix = line[:matched.start()]
-        if prefix.endswith('std::') or not prefix.endswith('::'):
-          required[header] = (linenum, template)
-
-  # The policy is that if you #include something in foo.h you don't need to
-  # include it again in foo.cc. Here, we will look at possible includes.
-  # Let's flatten the include_state include_list and copy it into a dictionary.
-  include_dict = dict([item for sublist in include_state.include_list
-                       for item in sublist])
-
-  # Did we find the header for this file (if any) and successfully load it?
-  header_found = False
-
-  # Use the absolute path so that matching works properly.
-  abs_filename = FileInfo(filename).FullName()
-
-  # For Emacs's flymake.
-  # If cpplint is invoked from Emacs's flymake, a temporary file is generated
-  # by flymake and that file name might end with '_flymake.cc'. In that case,
-  # restore original file name here so that the corresponding header file can be
-  # found.
-  # e.g. If the file name is 'foo_flymake.cc', we should search for 'foo.h'
-  # instead of 'foo_flymake.h'
-  abs_filename = re.sub(r'_flymake\.cc$', '.cc', abs_filename)
-
-  # include_dict is modified during iteration, so we iterate over a copy of
-  # the keys.
-  header_keys = list(include_dict.keys())
-  for header in header_keys:
-    (same_module, common_path) = FilesBelongToSameModule(abs_filename, header)
-    fullpath = common_path + header
-    if same_module and UpdateIncludeState(fullpath, include_dict, io):
-      header_found = True
-
-  # If we can't find the header file for a .cc, assume it's because we don't
-  # know where to look. In that case we'll give up as we're not sure they
-  # didn't include it in the .h file.
-  # TODO(unknown): Do a better job of finding .h files so we are confident that
-  # not having the .h file means there isn't one.
-  if not header_found:
-    for extension in GetNonHeaderExtensions():
-      if filename.endswith('.' + extension):
-        return
-
-  # All the lines have been processed, report the errors found.
- for required_header_unstripped in sorted(required, key=required.__getitem__): - template = required[required_header_unstripped][1] - if required_header_unstripped.strip('<>"') not in include_dict: - error(filename, required[required_header_unstripped][0], - 'build/include_what_you_use', 4, - 'Add #include ' + required_header_unstripped + ' for ' + template) - - -_RE_PATTERN_EXPLICIT_MAKEPAIR = re.compile(r'\bmake_pair\s*<') - - -def CheckMakePairUsesDeduction(filename, clean_lines, linenum, error): - """Check that make_pair's template arguments are deduced. - - G++ 4.6 in C++11 mode fails badly if make_pair's template arguments are - specified explicitly, and such use isn't intended in any case. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - match = _RE_PATTERN_EXPLICIT_MAKEPAIR.search(line) - if match: - error(filename, linenum, 'build/explicit_make_pair', - 4, # 4 = high confidence - 'For C++11-compatibility, omit template arguments from make_pair' - ' OR use pair directly OR if appropriate, construct a pair directly') - - -def CheckRedundantVirtual(filename, clean_lines, linenum, error): - """Check if line contains a redundant "virtual" function-specifier. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - # Look for "virtual" on current line. - line = clean_lines.elided[linenum] - virtual = Match(r'^(.*)(\bvirtual\b)(.*)$', line) - if not virtual: return - - # Ignore "virtual" keywords that are near access-specifiers. These - # are only used in class base-specifier and do not apply to member - # functions. - if (Search(r'\b(public|protected|private)\s+$', virtual.group(1)) or - Match(r'^\s+(public|protected|private)\b', virtual.group(3))): - return - - # Ignore the "virtual" keyword from virtual base classes. Usually - # there is a column on the same line in these cases (virtual base - # classes are rare in google3 because multiple inheritance is rare). - if Match(r'^.*[^:]:[^:].*$', line): return - - # Look for the next opening parenthesis. This is the start of the - # parameter list (possibly on the next line shortly after virtual). - # TODO(unknown): doesn't work if there are virtual functions with - # decltype() or other things that use parentheses, but csearch suggests - # that this is rare. - end_col = -1 - end_line = -1 - start_col = len(virtual.group(2)) - for start_line in xrange(linenum, min(linenum + 3, clean_lines.NumLines())): - line = clean_lines.elided[start_line][start_col:] - parameter_list = Match(r'^([^(]*)\(', line) - if parameter_list: - # Match parentheses to find the end of the parameter list - (_, end_line, end_col) = CloseExpression( - clean_lines, start_line, start_col + len(parameter_list.group(1))) - break - start_col = 0 - - if end_col < 0: - return # Couldn't find end of parameter list, give up - - # Look for "override" or "final" after the parameter list - # (possibly on the next few lines). 
- for i in xrange(end_line, min(end_line + 3, clean_lines.NumLines())): - line = clean_lines.elided[i][end_col:] - match = Search(r'\b(override|final)\b', line) - if match: - error(filename, linenum, 'readability/inheritance', 4, - ('"virtual" is redundant since function is ' - 'already declared as "%s"' % match.group(1))) - - # Set end_col to check whole lines after we are done with the - # first line. - end_col = 0 - if Search(r'[^\w]\s*$', line): - break - - -def CheckRedundantOverrideOrFinal(filename, clean_lines, linenum, error): - """Check if line contains a redundant "override" or "final" virt-specifier. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - # Look for closing parenthesis nearby. We need one to confirm where - # the declarator ends and where the virt-specifier starts to avoid - # false positives. - line = clean_lines.elided[linenum] - declarator_end = line.rfind(')') - if declarator_end >= 0: - fragment = line[declarator_end:] - else: - if linenum > 1 and clean_lines.elided[linenum - 1].rfind(')') >= 0: - fragment = line - else: - return - - # Check that at most one of "override" or "final" is present, not both - if Search(r'\boverride\b', fragment) and Search(r'\bfinal\b', fragment): - error(filename, linenum, 'readability/inheritance', 4, - ('"override" is redundant since function is ' - 'already declared as "final"')) - - - - -# Returns true if we are at a new block, and it is directly -# inside of a namespace. -def IsBlockInNameSpace(nesting_state, is_forward_declaration): - """Checks that the new block is directly in a namespace. - - Args: - nesting_state: The _NestingState object that contains info about our state. - is_forward_declaration: If the class is a forward declared class. - Returns: - Whether or not the new block is directly in a namespace. - """ - if is_forward_declaration: - return len(nesting_state.stack) >= 1 and ( - isinstance(nesting_state.stack[-1], _NamespaceInfo)) - - - return (len(nesting_state.stack) > 1 and - nesting_state.stack[-1].check_namespace_indentation and - isinstance(nesting_state.stack[-2], _NamespaceInfo)) - - -def ShouldCheckNamespaceIndentation(nesting_state, is_namespace_indent_item, - raw_lines_no_comments, linenum): - """This method determines if we should apply our namespace indentation check. - - Args: - nesting_state: The current nesting state. - is_namespace_indent_item: If we just put a new class on the stack, True. - If the top of the stack is not a class, or we did not recently - add the class, False. - raw_lines_no_comments: The lines without the comments. - linenum: The current line number we are processing. - - Returns: - True if we should apply our namespace indentation check. Currently, it - only works for classes and namespaces inside of a namespace. - """ - - is_forward_declaration = IsForwardClassDeclaration(raw_lines_no_comments, - linenum) - - if not (is_namespace_indent_item or is_forward_declaration): - return False - - # If we are in a macro, we do not want to check the namespace indentation. - if IsMacroDefinition(raw_lines_no_comments, linenum): - return False - - return IsBlockInNameSpace(nesting_state, is_forward_declaration) - - -# Call this method if the line is directly inside of a namespace. -# If the line above is blank (excluding comments) or the start of -# an inner namespace, it cannot be indented. 
-def CheckItemIndentationInNamespace(filename, raw_lines_no_comments, linenum, - error): - line = raw_lines_no_comments[linenum] - if Match(r'^\s+', line): - error(filename, linenum, 'runtime/indentation_namespace', 4, - 'Do not indent within a namespace') - - -def ProcessLine(filename, file_extension, clean_lines, line, - include_state, function_state, nesting_state, error, - extra_check_functions=None): - """Processes a single line in the file. - - Args: - filename: Filename of the file that is being processed. - file_extension: The extension (dot not included) of the file. - clean_lines: An array of strings, each representing a line of the file, - with comments stripped. - line: Number of line being processed. - include_state: An _IncludeState instance in which the headers are inserted. - function_state: A _FunctionState instance which counts function lines, etc. - nesting_state: A NestingState instance which maintains information about - the current stack of nested blocks being parsed. - error: A callable to which errors are reported, which takes 4 arguments: - filename, line number, error level, and message - extra_check_functions: An array of additional check functions that will be - run on each source line. Each function takes 4 - arguments: filename, clean_lines, line, error - """ - raw_lines = clean_lines.raw_lines - ParseNolintSuppressions(filename, raw_lines[line], line, error) - nesting_state.Update(filename, clean_lines, line, error) - CheckForNamespaceIndentation(filename, nesting_state, clean_lines, line, - error) - if nesting_state.InAsmBlock(): return - CheckForFunctionLengths(filename, clean_lines, line, function_state, error) - CheckForMultilineCommentsAndStrings(filename, clean_lines, line, error) - CheckStyle(filename, clean_lines, line, file_extension, nesting_state, error) - CheckLanguage(filename, clean_lines, line, file_extension, include_state, - nesting_state, error) - CheckForNonConstReference(filename, clean_lines, line, nesting_state, error) - CheckForNonStandardConstructs(filename, clean_lines, line, - nesting_state, error) - CheckVlogArguments(filename, clean_lines, line, error) - CheckPosixThreading(filename, clean_lines, line, error) - CheckInvalidIncrement(filename, clean_lines, line, error) - CheckMakePairUsesDeduction(filename, clean_lines, line, error) - CheckRedundantVirtual(filename, clean_lines, line, error) - CheckRedundantOverrideOrFinal(filename, clean_lines, line, error) - if extra_check_functions: - for check_fn in extra_check_functions: - check_fn(filename, clean_lines, line, error) - -def FlagCxx11Features(filename, clean_lines, linenum, error): - """Flag those c++11 features that we only allow in certain places. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line) - - # Flag unapproved C++ TR1 headers. - if include and include.group(1).startswith('tr1/'): - error(filename, linenum, 'build/c++tr1', 5, - ('C++ TR1 headers such as <%s> are unapproved.') % include.group(1)) - - # Flag unapproved C++11 headers. 
- if include and include.group(1) in ('cfenv', - 'condition_variable', - 'fenv.h', - 'future', - 'mutex', - 'thread', - 'chrono', - 'ratio', - 'regex', - 'system_error', - ): - error(filename, linenum, 'build/c++11', 5, - ('<%s> is an unapproved C++11 header.') % include.group(1)) - - # The only place where we need to worry about C++11 keywords and library - # features in preprocessor directives is in macro definitions. - if Match(r'\s*#', line) and not Match(r'\s*#\s*define\b', line): return - - # These are classes and free functions. The classes are always - # mentioned as std::*, but we only catch the free functions if - # they're not found by ADL. They're alphabetical by header. - for top_name in ( - # type_traits - 'alignment_of', - 'aligned_union', - ): - if Search(r'\bstd::%s\b' % top_name, line): - error(filename, linenum, 'build/c++11', 5, - ('std::%s is an unapproved C++11 class or function. Send c-style ' - 'an example of where it would make your code more readable, and ' - 'they may let you use it.') % top_name) - - -def FlagCxx14Features(filename, clean_lines, linenum, error): - """Flag those C++14 features that we restrict. - - Args: - filename: The name of the current file. - clean_lines: A CleansedLines instance containing the file. - linenum: The number of the line to check. - error: The function to call with any errors found. - """ - line = clean_lines.elided[linenum] - - include = Match(r'\s*#\s*include\s+[<"]([^<"]+)[">]', line) - - # Flag unapproved C++14 headers. - if include and include.group(1) in ('scoped_allocator', 'shared_mutex'): - error(filename, linenum, 'build/c++14', 5, - ('<%s> is an unapproved C++14 header.') % include.group(1)) - - -def ProcessFileData(filename, file_extension, lines, error, - extra_check_functions=None): - """Performs lint checks and reports any errors to the given error function. - - Args: - filename: Filename of the file that is being processed. - file_extension: The extension (dot not included) of the file. - lines: An array of strings, each representing a line of the file, with the - last element being empty if the file is terminated with a newline. - error: A callable to which errors are reported, which takes 4 arguments: - filename, line number, error level, and message - extra_check_functions: An array of additional check functions that will be - run on each source line. Each function takes 4 - arguments: filename, clean_lines, line, error - """ - lines = (['// marker so line numbers and indices both start at 1'] + lines + - ['// marker so line numbers end in a known way']) - - include_state = _IncludeState() - function_state = _FunctionState() - nesting_state = NestingState() - - ResetNolintSuppressions() - - CheckForCopyright(filename, lines, error) - ProcessGlobalSuppressions(lines) - RemoveMultiLineComments(filename, lines, error) - clean_lines = CleansedLines(lines) - - if IsHeaderExtension(file_extension): - CheckForHeaderGuard(filename, clean_lines, error) - - for line in xrange(clean_lines.NumLines()): - ProcessLine(filename, file_extension, clean_lines, line, - include_state, function_state, nesting_state, error, - extra_check_functions) - FlagCxx11Features(filename, clean_lines, line, error) - nesting_state.CheckCompletedBlocks(filename, error) - - CheckForIncludeWhatYouUse(filename, clean_lines, include_state, error) - - # Check that the .cc file has included its header if it exists. 
- if _IsSourceExtension(file_extension): - CheckHeaderFileIncluded(filename, include_state, error) - - # We check here rather than inside ProcessLine so that we see raw - # lines rather than "cleaned" lines. - CheckForBadCharacters(filename, lines, error) - - CheckForNewlineAtEOF(filename, lines, error) - -def ProcessConfigOverrides(filename): - """ Loads the configuration files and processes the config overrides. - - Args: - filename: The name of the file being processed by the linter. - - Returns: - False if the current |filename| should not be processed further. - """ - - abs_filename = os.path.abspath(filename) - cfg_filters = [] - keep_looking = True - while keep_looking: - abs_path, base_name = os.path.split(abs_filename) - if not base_name: - break # Reached the root directory. - - cfg_file = os.path.join(abs_path, "CPPLINT.cfg") - abs_filename = abs_path - if not os.path.isfile(cfg_file): - continue - - try: - with codecs.open(cfg_file, 'r', 'utf8', 'replace') as file_handle: - for line in file_handle: - line, _, _ = line.partition('#') # Remove comments. - if not line.strip(): - continue - - name, _, val = line.partition('=') - name = name.strip() - val = val.strip() - if name == 'set noparent': - keep_looking = False - elif name == 'filter': - cfg_filters.append(val) - elif name == 'exclude_files': - # When matching exclude_files pattern, use the base_name of - # the current file name or the directory name we are processing. - # For example, if we are checking for lint errors in /foo/bar/baz.cc - # and we found the .cfg file at /foo/CPPLINT.cfg, then the config - # file's "exclude_files" filter is meant to be checked against "bar" - # and not "baz" nor "bar/baz.cc". - if base_name: - pattern = re.compile(val) - if pattern.match(base_name): - if _cpplint_state.quiet: - # Suppress "Ignoring file" warning when using --quiet. - return False - _cpplint_state.PrintInfo('Ignoring "%s": file excluded by "%s". ' - 'File path component "%s" matches ' - 'pattern "%s"\n' % - (filename, cfg_file, base_name, val)) - return False - elif name == 'linelength': - global _line_length - try: - _line_length = int(val) - except ValueError: - _cpplint_state.PrintError('Line length must be numeric.') - elif name == 'extensions': - ProcessExtensionsOption(val) - elif name == 'root': - global _root - # root directories are specified relative to CPPLINT.cfg dir. - _root = os.path.join(os.path.dirname(cfg_file), val) - elif name == 'headers': - ProcessHppHeadersOption(val) - elif name == 'includeorder': - ProcessIncludeOrderOption(val) - else: - _cpplint_state.PrintError( - 'Invalid configuration option (%s) in file %s\n' % - (name, cfg_file)) - - except IOError: - _cpplint_state.PrintError( - "Skipping config file '%s': Can't open for reading\n" % cfg_file) - keep_looking = False - - # Apply all the accumulated filters in reverse order (top-level directory - # config options having the least priority). - for cfg_filter in reversed(cfg_filters): - _AddFilters(cfg_filter) - - return True - - -def ProcessFile(filename, vlevel, extra_check_functions=None): - """Does google-lint on a single file. - - Args: - filename: The name of the file to parse. - - vlevel: The level of errors to report. Every error of confidence - >= verbose_level will be reported. 0 is a good default. - - extra_check_functions: An array of additional check functions that will be - run on each source line. 
Each function takes 4 - arguments: filename, clean_lines, line, error - """ - - _SetVerboseLevel(vlevel) - _BackupFilters() - old_errors = _cpplint_state.error_count - - if not ProcessConfigOverrides(filename): - _RestoreFilters() - return - - lf_lines = [] - crlf_lines = [] - try: - # Support the UNIX convention of using "-" for stdin. Note that - # we are not opening the file with universal newline support - # (which codecs doesn't support anyway), so the resulting lines do - # contain trailing '\r' characters if we are reading a file that - # has CRLF endings. - # If after the split a trailing '\r' is present, it is removed - # below. - if filename == '-': - lines = codecs.StreamReaderWriter(sys.stdin, - codecs.getreader('utf8'), - codecs.getwriter('utf8'), - 'replace').read().split('\n') - else: - with codecs.open(filename, 'r', 'utf8', 'replace') as target_file: - lines = target_file.read().split('\n') - - # Remove trailing '\r'. - # The -1 accounts for the extra trailing blank line we get from split() - for linenum in range(len(lines) - 1): - if lines[linenum].endswith('\r'): - lines[linenum] = lines[linenum].rstrip('\r') - crlf_lines.append(linenum + 1) - else: - lf_lines.append(linenum + 1) - - except IOError: - _cpplint_state.PrintError( - "Skipping input '%s': Can't open for reading\n" % filename) - _RestoreFilters() - return - - # Note, if no dot is found, this will give the entire filename as the ext. - file_extension = filename[filename.rfind('.') + 1:] - - # When reading from stdin, the extension is unknown, so no cpplint tests - # should rely on the extension. - if filename != '-' and file_extension not in GetAllExtensions(): - _cpplint_state.PrintError('Ignoring %s; not a valid file name ' - '(%s)\n' % (filename, ', '.join(GetAllExtensions()))) - else: - ProcessFileData(filename, file_extension, lines, Error, - extra_check_functions) - - # If end-of-line sequences are a mix of LF and CR-LF, issue - # warnings on the lines with CR. - # - # Don't issue any warnings if all lines are uniformly LF or CR-LF, - # since critique can handle these just fine, and the style guide - # doesn't dictate a particular end of line sequence. - # - # We can't depend on os.linesep to determine what the desired - # end-of-line sequence should be, since that will return the - # server-side end-of-line sequence. - if lf_lines and crlf_lines: - # Warn on every line with CR. An alternative approach might be to - # check whether the file is mostly CRLF or just LF, and warn on the - # minority, we bias toward LF here since most tools prefer LF. - for linenum in crlf_lines: - Error(filename, linenum, 'whitespace/newline', 1, - 'Unexpected \\r (^M) found; better to use only \\n') - - # Suppress printing anything if --quiet was passed unless the error - # count has increased after processing this file. - if not _cpplint_state.quiet or old_errors != _cpplint_state.error_count: - _cpplint_state.PrintInfo('Done processing %s\n' % filename) - _RestoreFilters() - - -def PrintUsage(message): - """Prints a brief usage string and exits, optionally with an error message. - - Args: - message: The optional error message. 
- """ - sys.stderr.write(_USAGE % (sorted(list(GetAllExtensions())), - ','.join(sorted(list(GetAllExtensions()))), - sorted(GetHeaderExtensions()), - ','.join(sorted(GetHeaderExtensions())))) - - if message: - sys.exit('\nFATAL ERROR: ' + message) - else: - sys.exit(0) - -def PrintVersion(): - sys.stdout.write('Cpplint fork (https://github.com/cpplint/cpplint)\n') - sys.stdout.write('cpplint ' + __VERSION__ + '\n') - sys.stdout.write('Python ' + sys.version + '\n') - sys.exit(0) - -def PrintCategories(): - """Prints a list of all the error-categories used by error messages. - - These are the categories used to filter messages via --filter. - """ - sys.stderr.write(''.join(' %s\n' % cat for cat in _ERROR_CATEGORIES)) - sys.exit(0) - - -def ParseArguments(args): - """Parses the command line arguments. - - This may set the output format and verbosity level as side-effects. - - Args: - args: The command line arguments: - - Returns: - The list of filenames to lint. - """ - try: - (opts, filenames) = getopt.getopt(args, '', ['help', 'output=', 'verbose=', - 'v=', - 'version', - 'counting=', - 'filter=', - 'root=', - 'repository=', - 'linelength=', - 'extensions=', - 'exclude=', - 'recursive', - 'headers=', - 'includeorder=', - 'quiet']) - except getopt.GetoptError: - PrintUsage('Invalid arguments.') - - verbosity = _VerboseLevel() - output_format = _OutputFormat() - filters = '' - quiet = _Quiet() - counting_style = '' - recursive = False - - for (opt, val) in opts: - if opt == '--help': - PrintUsage(None) - if opt == '--version': - PrintVersion() - elif opt == '--output': - if val not in ('emacs', 'vs7', 'eclipse', 'junit', 'sed', 'gsed'): - PrintUsage('The only allowed output formats are emacs, vs7, eclipse ' - 'sed, gsed and junit.') - output_format = val - elif opt == '--quiet': - quiet = True - elif opt == '--verbose' or opt == '--v': - verbosity = int(val) - elif opt == '--filter': - filters = val - if not filters: - PrintCategories() - elif opt == '--counting': - if val not in ('total', 'toplevel', 'detailed'): - PrintUsage('Valid counting options are total, toplevel, and detailed') - counting_style = val - elif opt == '--root': - global _root - _root = val - elif opt == '--repository': - global _repository - _repository = val - elif opt == '--linelength': - global _line_length - try: - _line_length = int(val) - except ValueError: - PrintUsage('Line length must be digits.') - elif opt == '--exclude': - global _excludes - if not _excludes: - _excludes = set() - _excludes.update(glob.glob(val)) - elif opt == '--extensions': - ProcessExtensionsOption(val) - elif opt == '--headers': - ProcessHppHeadersOption(val) - elif opt == '--recursive': - recursive = True - elif opt == '--includeorder': - ProcessIncludeOrderOption(val) - - if not filenames: - PrintUsage('No files were specified.') - - if recursive: - filenames = _ExpandDirectories(filenames) - - if _excludes: - filenames = _FilterExcludedFiles(filenames) - - _SetOutputFormat(output_format) - _SetQuiet(quiet) - _SetVerboseLevel(verbosity) - _SetFilters(filters) - _SetCountingStyle(counting_style) - - filenames.sort() - return filenames - -def _ExpandDirectories(filenames): - """Searches a list of filenames and replaces directories in the list with - all files descending from those directories. Files with extensions not in - the valid extensions list are excluded. 
- - Args: - filenames: A list of files or directories - - Returns: - A list of all files that are members of filenames or descended from a - directory in filenames - """ - expanded = set() - for filename in filenames: - if not os.path.isdir(filename): - expanded.add(filename) - continue - - for root, _, files in os.walk(filename): - for loopfile in files: - fullname = os.path.join(root, loopfile) - if fullname.startswith('.' + os.path.sep): - fullname = fullname[len('.' + os.path.sep):] - expanded.add(fullname) - - filtered = [] - for filename in expanded: - if os.path.splitext(filename)[1][1:] in GetAllExtensions(): - filtered.append(filename) - return filtered - -def _FilterExcludedFiles(fnames): - """Filters out files listed in the --exclude command line switch. File paths - in the switch are evaluated relative to the current working directory - """ - exclude_paths = [os.path.abspath(f) for f in _excludes] - # because globbing does not work recursively, exclude all subpath of all excluded entries - return [f for f in fnames - if not any(e for e in exclude_paths - if _IsParentOrSame(e, os.path.abspath(f)))] - -def _IsParentOrSame(parent, child): - """Return true if child is subdirectory of parent. - Assumes both paths are absolute and don't contain symlinks. - """ - parent = os.path.normpath(parent) - child = os.path.normpath(child) - if parent == child: - return True - - prefix = os.path.commonprefix([parent, child]) - if prefix != parent: - return False - # Note: os.path.commonprefix operates on character basis, so - # take extra care of situations like '/foo/ba' and '/foo/bar/baz' - child_suffix = child[len(prefix):] - child_suffix = child_suffix.lstrip(os.sep) - return child == os.path.join(prefix, child_suffix) - -def main(): - filenames = ParseArguments(sys.argv[1:]) - backup_err = sys.stderr - try: - # Change stderr to write with replacement characters so we don't die - # if we try to print something containing non-ASCII characters. - sys.stderr = codecs.StreamReader(sys.stderr, 'replace') - - _cpplint_state.ResetErrorCounts() - for filename in filenames: - ProcessFile(filename, _cpplint_state.verbose_level) - # If --quiet is passed, suppress printing error count unless there are errors. - if not _cpplint_state.quiet or _cpplint_state.error_count > 0: - _cpplint_state.PrintErrorCounts() - - if _cpplint_state.output_format == 'junit': - sys.stderr.write(_cpplint_state.FormatJUnitXML()) - - finally: - sys.stderr = backup_err - - sys.exit(_cpplint_state.error_count > 0) - - -if __name__ == '__main__': - main() diff --git a/cpp/build-support/iwyu/iwyu-filter.awk b/cpp/build-support/iwyu/iwyu-filter.awk deleted file mode 100644 index 943ab115c25..00000000000 --- a/cpp/build-support/iwyu/iwyu-filter.awk +++ /dev/null @@ -1,96 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. 
See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-#
-# This is an awk script to process output from the include-what-you-use (IWYU)
-# tool. As of now, IWYU is of alpha quality and it gives many incorrect
-# recommendations -- obviously invalid or leading to compilation breakage.
-# Most of those can be silenced using appropriate IWYU pragmas, but it's not
-# the case for the auto-generated files.
-#
-# Also, it's possible to address invalid recommendation using mappings:
-# https://github.com/include-what-you-use/include-what-you-use/blob/master/docs/IWYUMappings.md
-#
-# Usage:
-#  1. Run the CMake with -DCMAKE_CXX_INCLUDE_WHAT_YOU_USE=<iwyu_cmd_line>
-#
-#     The path to the IWYU binary should be absolute. The path to the binary
-#     and the command-line options should be separated by semicolon
-#     (that's for feeding it into CMake list variables).
-#
-#     E.g., from the build directory (line breaks are just for readability):
-#
-#       CC=../../thirdparty/clang-toolchain/bin/clang
-#       CXX=../../thirdparty/clang-toolchain/bin/clang++
-#       IWYU="`pwd`../../thirdparty/clang-toolchain/bin/include-what-you-use;\
-#         -Xiwyu;--mapping_file=`pwd`../../build-support/iwyu/mappings/map.imp"
-#
-#       ../../build-support/enable_devtoolset.sh \
-#         env CC=$CC CXX=$CXX \
-#         ../../thirdparty/installed/common/bin/cmake \
-#         -DCMAKE_CXX_INCLUDE_WHAT_YOU_USE=\"$IWYU\" \
-#         ../..
-#
-#     NOTE:
-#       Since the arrow code has some 'ifdef NDEBUG' directives, it's possible
-#       that IWYU would produce different results if run against release, not
-#       debug build. However, we plan to use the tool only with debug builds.
-#
-#  2. Run make, separating the output from the IWYU tool into a separate file
-#     (it's possible to use piping the output from the tool to the script
-#     but having a file is good for future reference, if necessary):
-#
-#       make -j$(nproc) 2>/tmp/iwyu.log
-#
-#  3. Process the output from the IWYU tool using the script:
-#
-#       awk -f ../../build-support/iwyu/iwyu-filter.awk /tmp/iwyu.log
-#
-
-BEGIN {
-  # This is the list of the files for which the suggestions from IWYU are
-  # ignored. Eventually, this list should become empty as soon as all the valid
-  # suggestions are addressed and invalid ones are taken care either by proper
-  # IWYU pragmas or adding special mappings (e.g. like boost mappings).
-  # muted["relative/path/to/file"]
-  muted["arrow/util/bit-util-test.cc"]
-  muted["arrow/util/rle-encoding-test.cc"]
-  muted["arrow/vendored"]
-  muted["include/hdfs.h"]
-  muted["arrow/visitor.h"]
-}
-
-# mute all suggestions for the auto-generated files
-/.*\.(pb|proxy|service)\.(cc|h) should (add|remove) these lines:/, /^$/ {
-  next
-}
-
-# mute suggestions for the explicitly specified files
-/.* should (add|remove) these lines:/ {
-  do_print = 1
-  for (path in muted) {
-    if (index($0, path)) {
-      do_print = 0
-      break
-    }
-  }
-}
-/^$/ {
-  if (do_print) print
-  do_print = 0
-}
-{ if (do_print) print }
diff --git a/cpp/build-support/iwyu/iwyu.sh b/cpp/build-support/iwyu/iwyu.sh
deleted file mode 100755
index 58ffce0c353..00000000000
--- a/cpp/build-support/iwyu/iwyu.sh
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership.
diff --git a/cpp/build-support/iwyu/iwyu.sh b/cpp/build-support/iwyu/iwyu.sh
deleted file mode 100755
index 58ffce0c353..00000000000
--- a/cpp/build-support/iwyu/iwyu.sh
+++ /dev/null
@@ -1,90 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-#
-set -uo pipefail
-
-ROOT=$(cd $(dirname $BASH_SOURCE)/../../..; pwd)
-
-IWYU_LOG=$(mktemp -t arrow-cpp-iwyu.XXXXXX)
-trap "rm -f $IWYU_LOG" EXIT
-
-IWYU_MAPPINGS_PATH="$ROOT/cpp/build-support/iwyu/mappings"
-IWYU_ARGS="\
-    --mapping_file=$IWYU_MAPPINGS_PATH/boost-all.imp \
-    --mapping_file=$IWYU_MAPPINGS_PATH/boost-all-private.imp \
-    --mapping_file=$IWYU_MAPPINGS_PATH/boost-extra.imp \
-    --mapping_file=$IWYU_MAPPINGS_PATH/gflags.imp \
-    --mapping_file=$IWYU_MAPPINGS_PATH/glog.imp \
-    --mapping_file=$IWYU_MAPPINGS_PATH/gmock.imp \
-    --mapping_file=$IWYU_MAPPINGS_PATH/gtest.imp \
-    --mapping_file=$IWYU_MAPPINGS_PATH/arrow-misc.imp"
-
-set -e
-
-affected_files() {
-  pushd $ROOT > /dev/null
-  local commit=$($ROOT/cpp/build-support/get-upstream-commit.sh)
-  git diff --name-only $commit | awk '/\.(c|cc|h)$/'
-  popd > /dev/null
-}
-
-# Show the IWYU version. Also causes the script to fail if iwyu is not in your
-# PATH
-include-what-you-use --version
-
-if [[ "${1:-}" == "all" ]]; then
-  ${PYTHON:-python3} $ROOT/cpp/build-support/iwyu/iwyu_tool.py -p ${IWYU_COMPILATION_DATABASE_PATH:-.} \
-      -- $IWYU_ARGS | awk -f $ROOT/cpp/build-support/iwyu/iwyu-filter.awk
-elif [[ "${1:-}" == "match" ]]; then
-  ALL_FILES=
-  IWYU_FILE_LIST=
-  for path in $(find $ROOT/cpp/src -type f | awk '/\.(c|cc|h)$/'); do
-    if [[ $path =~ $2 ]]; then
-      IWYU_FILE_LIST="$IWYU_FILE_LIST $path"
-    fi
-  done
-
-  echo "Running IWYU on $IWYU_FILE_LIST"
-  ${PYTHON:-python3} $ROOT/cpp/build-support/iwyu/iwyu_tool.py \
-      -p ${IWYU_COMPILATION_DATABASE_PATH:-.} $IWYU_FILE_LIST -- \
-      $IWYU_ARGS | awk -f $ROOT/cpp/build-support/iwyu/iwyu-filter.awk
-else
-  # Build the list of updated files which are of IWYU interest.
-  file_list_tmp=$(affected_files)
-  if [ -z "$file_list_tmp" ]; then
-    exit 0
-  fi
-
-  # Adjust the path for every element in the list. The iwyu_tool.py normalizes
-  # paths (via realpath) to match the records from the compilation database.
-  IWYU_FILE_LIST=
-  for p in $file_list_tmp; do
-    IWYU_FILE_LIST="$IWYU_FILE_LIST $ROOT/$p"
-  done
-
-  ${PYTHON:-python3} $ROOT/cpp/build-support/iwyu/iwyu_tool.py \
-      -p ${IWYU_COMPILATION_DATABASE_PATH:-.} $IWYU_FILE_LIST -- \
-      $IWYU_ARGS | awk -f $ROOT/cpp/build-support/iwyu/iwyu-filter.awk > $IWYU_LOG
-fi
-
-if [ -s "$IWYU_LOG" ]; then
-  # The output is not empty: the changelist needs correction.
-  cat $IWYU_LOG 1>&2
-  exit 1
-fi
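The default branch of iwyu.sh above analyzes only files changed since the upstream commit, converting them to absolute paths so that iwyu_tool.py can match them against the compilation database via realpath. A Python sketch of that selection step (illustrative; upstream_commit stands in for the output of get-upstream-commit.sh):

    import re
    import subprocess

    SOURCE_RE = re.compile(r'\.(c|cc|h)$')

    def affected_files(root, upstream_commit):
        # Mirrors `git diff --name-only $commit | awk '/\.(c|cc|h)$/'`.
        out = subprocess.run(['git', 'diff', '--name-only', upstream_commit],
                             cwd=root, check=True, capture_output=True,
                             text=True).stdout
        # iwyu_tool.py resolves database entries via realpath, so hand it
        # absolute paths rooted at the checkout.
        return ['%s/%s' % (root, path)
                for path in out.splitlines() if SOURCE_RE.search(path)]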
diff --git a/cpp/build-support/iwyu/iwyu_tool.py b/cpp/build-support/iwyu/iwyu_tool.py
deleted file mode 100755
index a0ee11a0296..00000000000
--- a/cpp/build-support/iwyu/iwyu_tool.py
+++ /dev/null
@@ -1,280 +0,0 @@
-#!/usr/bin/env python
-
-# This file has been imported into the apache source tree from
-# the IWYU source tree as of version 0.8
-# https://github.com/include-what-you-use/include-what-you-use/blob/master/iwyu_tool.py
-# and corresponding license has been added:
-# https://github.com/include-what-you-use/include-what-you-use/blob/master/LICENSE.TXT
-#
-# ==============================================================================
-# LLVM Release License
-# ==============================================================================
-# University of Illinois/NCSA
-# Open Source License
-#
-# Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
-# All rights reserved.
-#
-# Developed by:
-#
-#     LLVM Team
-#
-#     University of Illinois at Urbana-Champaign
-#
-#     http://llvm.org
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy of
-# this software and associated documentation files (the "Software"), to deal with
-# the Software without restriction, including without limitation the rights to
-# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is furnished to do
-# so, subject to the following conditions:
-#
-#     * Redistributions of source code must retain the above copyright notice,
-#       this list of conditions and the following disclaimers.
-#
-#     * Redistributions in binary form must reproduce the above copyright notice,
-#       this list of conditions and the following disclaimers in the
-#       documentation and/or other materials provided with the distribution.
-#
-#     * Neither the names of the LLVM Team, University of Illinois at
-#       Urbana-Champaign, nor the names of its contributors may be used to
-#       endorse or promote products derived from this Software without specific
-#       prior written permission.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
-# SOFTWARE.
-
-""" Driver to consume a Clang compilation database and invoke IWYU.
-
-Example usage with CMake:
-
-  # Unix systems
-  $ mkdir build && cd build
-  $ CC="clang" CXX="clang++" cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON ...
-  $ iwyu_tool.py -p .
-
-  # Windows systems
-  $ mkdir build && cd build
-  $ cmake -DCMAKE_CXX_COMPILER="%VCINSTALLDIR%/bin/cl.exe" \
-        -DCMAKE_C_COMPILER="%VCINSTALLDIR%/VC/bin/cl.exe" \
-        -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \
-        -G Ninja ...
-  $ python iwyu_tool.py -p .
-
-See iwyu_tool.py -h for more details on command-line arguments.
-"""
-
-import os
-import sys
-import json
-import argparse
-import subprocess
-import re
-
-import logging
-
-logging.basicConfig(filename='iwyu.log')
-LOGGER = logging.getLogger("iwyu")
-
-
-def iwyu_formatter(output):
-    """ Process iwyu's output, basically a no-op. """
-    print('\n'.join(output))
-
-
-CORRECT_RE = re.compile(r'^\((.*?) has correct #includes/fwd-decls\)$')
-SHOULD_ADD_RE = re.compile(r'^(.*?) should add these lines:$')
-SHOULD_REMOVE_RE = re.compile(r'^(.*?) should remove these lines:$')
-FULL_LIST_RE = re.compile(r'The full include-list for (.*?):$')
-END_RE = re.compile(r'^---$')
-LINES_RE = re.compile(r'^- (.*?) // lines ([0-9]+)-[0-9]+$')
-
-
-GENERAL, ADD, REMOVE, LIST = range(4)
-
-
-def clang_formatter(output):
-    """ Process iwyu's output into something clang-like. """
-    state = (GENERAL, None)
-    for line in output:
-        match = CORRECT_RE.match(line)
-        if match:
-            print('%s:1:1: note: #includes/fwd-decls are correct' % match.group(1))
-            continue
-        match = SHOULD_ADD_RE.match(line)
-        if match:
-            state = (ADD, match.group(1))
-            continue
-        match = SHOULD_REMOVE_RE.match(line)
-        if match:
-            state = (REMOVE, match.group(1))
-            continue
-        match = FULL_LIST_RE.match(line)
-        if match:
-            state = (LIST, match.group(1))
-        elif END_RE.match(line):
-            state = (GENERAL, None)
-        elif not line.strip():
-            continue
-        elif state[0] == GENERAL:
-            print(line)
-        elif state[0] == ADD:
-            print('%s:1:1: error: add the following line' % state[1])
-            print(line)
-        elif state[0] == REMOVE:
-            match = LINES_RE.match(line)
-            line_no = match.group(2) if match else '1'
-            print('%s:%s:1: error: remove the following line' % (state[1], line_no))
-            print(match.group(1) if match else line)
-
-
-DEFAULT_FORMAT = 'iwyu'
-FORMATTERS = {
-    'iwyu': iwyu_formatter,
-    'clang': clang_formatter
-}
-
-
-def get_output(cwd, command):
-    """ Run the given command and return its output as a string. """
-    process = subprocess.Popen(command,
-                               cwd=cwd,
-                               shell=True,
-                               stdout=subprocess.PIPE,
-                               stderr=subprocess.STDOUT)
-    return process.communicate()[0].decode("utf-8").splitlines()
-
-
-def run_iwyu(cwd, compile_command, iwyu_args, verbose, formatter):
-    """ Rewrite compile_command to an IWYU command, and run it. """
-    compiler, _, args = compile_command.partition(' ')
-    if compiler.endswith('cl.exe'):
-        # If the compiler name is cl.exe, let IWYU be cl-compatible
-        clang_args = ['--driver-mode=cl']
-    else:
-        clang_args = []
-
-    iwyu_args = ['-Xiwyu ' + a for a in iwyu_args]
-    command = ['include-what-you-use'] + clang_args + iwyu_args
-    command = '%s %s' % (' '.join(command), args.strip())
-
-    if verbose:
-        print('%s:' % command)
-
-    formatter(get_output(cwd, command))
-
-
-def main(compilation_db_path, source_files, verbose, formatter, iwyu_args):
-    """ Entry point. """
-    # Canonicalize compilation database path
-    if os.path.isdir(compilation_db_path):
-        compilation_db_path = os.path.join(compilation_db_path,
-                                           'compile_commands.json')
-
-    compilation_db_path = os.path.realpath(compilation_db_path)
-    if not os.path.isfile(compilation_db_path):
-        print('ERROR: No such file or directory: \'%s\'' % compilation_db_path)
-        return 1
-
-    # Read compilation db from disk
-    with open(compilation_db_path, 'r') as fileobj:
-        compilation_db = json.load(fileobj)
-
-    # Expand symlinks
-    for entry in compilation_db:
-        entry['file'] = os.path.realpath(entry['file'])
-
-    # Cross-reference source files with compilation database
-    source_files = [os.path.realpath(s) for s in source_files]
-    if not source_files:
-        # No source files specified, analyze entire compilation database
-        entries = compilation_db
-    else:
-        # Source files specified, analyze the ones appearing in compilation db,
-        # warn for the rest.
-        entries = []
-        for source in source_files:
-            matches = [e for e in compilation_db if e['file'] == source]
-            if matches:
-                entries.extend(matches)
-            else:
-                print(f"{source} not in compilation database")
-                # TODO: As long as there is no complete compilation database
-                # available, this check cannot be performed
-                pass
-                # print('WARNING: \'%s\' not found in compilation database.' % source)
-
-    # Run analysis
-    try:
-        for entry in entries:
-            cwd, compile_command = entry['directory'], entry['command']
-            run_iwyu(cwd, compile_command, iwyu_args, verbose, formatter)
-    except OSError as why:
-        print('ERROR: Failed to launch include-what-you-use: %s' % why)
-        return 1
-
-    return 0
-
-
-def _bootstrap():
-    """ Parse arguments and dispatch to main(). """
-    # This hackery is necessary to add the forwarded IWYU args to the
-    # usage and help strings.
-    def customize_usage(parser):
-        """ Rewrite the parser's format_usage. """
-        original_format_usage = parser.format_usage
-        parser.format_usage = lambda: original_format_usage().rstrip() + \
-            ' -- [<IWYU args>]' + os.linesep
-
-    def customize_help(parser):
-        """ Rewrite the parser's format_help. """
-        original_format_help = parser.format_help
-
-        def custom_help():
-            """ Customized help string, calls the adjusted format_usage. """
-            helpmsg = original_format_help()
-            helplines = helpmsg.splitlines()
-            helplines[0] = parser.format_usage().rstrip()
-            return os.linesep.join(helplines) + os.linesep
-
-        parser.format_help = custom_help
-
-    # Parse arguments
-    parser = argparse.ArgumentParser(
-        description='Include-what-you-use compilation database driver.',
-        epilog='Assumes include-what-you-use is available on the PATH.')
-    customize_usage(parser)
-    customize_help(parser)
-
-    parser.add_argument('-v', '--verbose', action='store_true',
-                        help='Print IWYU commands')
-    parser.add_argument('-o', '--output-format', type=str,
-                        choices=FORMATTERS.keys(), default=DEFAULT_FORMAT,
-                        help='Output format (default: %s)' % DEFAULT_FORMAT)
-    parser.add_argument('-p', metavar='<build-path>', required=True,
-                        help='Compilation database path', dest='dbpath')
-    parser.add_argument('source', nargs='*',
-                        help='Zero or more source files to run IWYU on. '
-                             'Defaults to all in compilation database.')
-
-    def partition_args(argv):
-        """ Split around '--' into driver args and IWYU args. """
-        try:
-            double_dash = argv.index('--')
-            return argv[:double_dash], argv[double_dash+1:]
-        except ValueError:
-            return argv, []
-    argv, iwyu_args = partition_args(sys.argv[1:])
-    args = parser.parse_args(argv)
-
-    sys.exit(main(args.dbpath, args.source, args.verbose,
-                  FORMATTERS[args.output_format], iwyu_args))
-
-
-if __name__ == '__main__':
-    _bootstrap()
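To make the driver's flow concrete: given one entry from a CMake-generated compile_commands.json, run_iwyu above rewrites it into an IWYU invocation by swapping the compiler for include-what-you-use, prefixing each IWYU option with -Xiwyu, and keeping the original compile flags. A small worked example (entry contents invented for illustration):

    # One entry from a CMake-generated compile_commands.json (paths invented):
    entry = {
        'directory': '/home/user/arrow/cpp/build',
        'command': 'clang++ -Isrc -std=c++17 -c src/arrow/status.cc',
        'file': 'src/arrow/status.cc',
    }

    iwyu_args = ['--mapping_file=mappings/boost-all.imp']

    # The same rewrite run_iwyu() performs (cl.exe branch omitted):
    compiler, _, args = entry['command'].partition(' ')
    command = ' '.join(['include-what-you-use'] +
                       ['-Xiwyu ' + a for a in iwyu_args]) + ' ' + args.strip()
    print(command)
    # include-what-you-use -Xiwyu --mapping_file=mappings/boost-all.imp
    #     -Isrc -std=c++17 -c src/arrow/status.cc   (printed as one line)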
diff --git a/cpp/build-support/iwyu/mappings/arrow-misc.imp b/cpp/build-support/iwyu/mappings/arrow-misc.imp
deleted file mode 100644
index 6f144f1f34e..00000000000
--- a/cpp/build-support/iwyu/mappings/arrow-misc.imp
+++ /dev/null
@@ -1,61 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-[
-  # (19 include mappings elided: the header names inside their angle
-  #  brackets, like the empty quoted fields below, were lost when this
-  #  patch was converted to text)
-  { symbol: ["bool", private, "", public ] },
-  { symbol: ["false", private, "", public ] },
-  { symbol: ["true", private, "", public ] },
-  { symbol: ["int8_t", private, "", public ] },
-  { symbol: ["int16_t", private, "", public ] },
-  { symbol: ["int32_t", private, "", public ] },
-  { symbol: ["int64_t", private, "", public ] },
-  { symbol: ["uint8_t", private, "", public ] },
-  { symbol: ["uint16_t", private, "", public ] },
-  { symbol: ["uint32_t", private, "", public ] },
-  { symbol: ["uint64_t", private, "", public ] },
-  { symbol: ["size_t", private, "", public ] },
-  { symbol: ["variant", private, "\"arrow/compute/kernel.h\"", public ] },
-  { symbol: ["default_memory_pool", private, "\"arrow/type_fwd.h\"", public ] },
-  { symbol: ["make_shared", private, "", public ] },
-  { symbol: ["shared_ptr", private, "", public ] },
-  { symbol: ["_Node_const_iterator", private, "", public ] },
-  { symbol: ["unordered_map<>::mapped_type", private, "", public ] },
-  { symbol: ["std::copy", private, "", public ] },
-  { symbol: ["std::move", private, "", public ] },
-  { symbol: ["std::transform", private, "", public ] },
-  { symbol: ["pair", private, "", public ] },
-  { symbol: ["errno", private, "", public ] },
-  { symbol: ["posix_memalign", private, "", public ] }
-]
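The boost mapping file deleted next was generated mechanically rather than written by hand: its header quotes a grep/perl pipeline that scans boost's own #include lines and emits one private-to-private mapping per detail/impl header inclusion. A rough Python rendering of that generator (a sketch, assuming a boost checkout under ./boost; not the original tool):

    import os
    import re

    INCLUDE_RE = re.compile(r'^\s*#\s*include\s*[<"]([^>"]+)[>"]')
    PRIVATE_RE = re.compile(r'boost/([^:]*/)?(detail|impl)/')

    def boost_private_mappings(root='boost'):
        # For every boost detail/impl header, record the detail/impl headers
        # it includes: both sides of such an edge are private to boost.
        for dirpath, _, files in os.walk(root):
            for name in files:
                path = os.path.join(dirpath, name)
                if not PRIVATE_RE.search(path):
                    continue
                with open(path, errors='replace') as src:
                    for line in src:
                        match = INCLUDE_RE.match(line)
                        if match and PRIVATE_RE.search(match.group(1)):
                            yield ('  { include: ["<%s>", private, "<%s>", private ] },'
                                   % (match.group(1), path))

    for entry in sorted(set(boost_private_mappings())):
        print(entry)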
diff --git a/cpp/build-support/iwyu/mappings/boost-all-private.imp b/cpp/build-support/iwyu/mappings/boost-all-private.imp
deleted file mode 100644
index 133eef11375..00000000000
--- a/cpp/build-support/iwyu/mappings/boost-all-private.imp
+++ /dev/null
@@ -1,4166 +0,0 @@
-# This file has been imported into the arrow source tree from
-# the IWYU source tree as of version 0.8
-# https://github.com/include-what-you-use/include-what-you-use/blob/master/boost-all-private.imp
-# and corresponding license has been added:
-# https://github.com/include-what-you-use/include-what-you-use/blob/master/LICENSE.TXT
-#
-# ==============================================================================
-# LLVM Release License
-# ==============================================================================
-# University of Illinois/NCSA
-# Open Source License
-#
-# Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign.
-# All rights reserved.
-#
-# Developed by:
-#
-#     LLVM Team
-#
-#     University of Illinois at Urbana-Champaign
-#
-#     http://llvm.org
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy of
-# this software and associated documentation files (the "Software"), to deal with
-# the Software without restriction, including without limitation the rights to
-# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
-# of the Software, and to permit persons to whom the Software is furnished to do
-# so, subject to the following conditions:
-#
-#     * Redistributions of source code must retain the above copyright notice,
-#       this list of conditions and the following disclaimers.
-#
-#     * Redistributions in binary form must reproduce the above copyright notice,
-#       this list of conditions and the following disclaimers in the
-#       documentation and/or other materials provided with the distribution.
-#
-#     * Neither the names of the LLVM Team, University of Illinois at
-#       Urbana-Champaign, nor the names of its contributors may be used to
-#       endorse or promote products derived from this Software without specific
-#       prior written permission.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
-# SOFTWARE.
-
-[
-#grep -r '^ *# *include' boost/ | grep -e "boost/[^:]*/detail/.*hp*:" -e "boost/[^:]*/impl/.*hp*:" | grep -e "\:.*/detail/" -e "\:.*/impl/" | perl -nle 'm/^([^:]+).*["<]([^>]+)[">]/ && print qq@ { include: ["<$2>", private, "<$1>", private ] },@'
-#
-# (several thousand generated boost mapping entries followed here; the
-#  header names inside their angle brackets were lost when this patch was
-#  converted to text, so the entries are elided)
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", 
private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { 
include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private 
] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, 
"", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] }, - { include: ["", private, "", private ] } -] diff --git a/cpp/build-support/iwyu/mappings/boost-all.imp b/cpp/build-support/iwyu/mappings/boost-all.imp deleted file mode 100644 index 7c48acaf341..00000000000 --- a/cpp/build-support/iwyu/mappings/boost-all.imp +++ /dev/null @@ -1,5679 +0,0 @@ -# This file has been imported into the apache source tree from -# the IWYU source tree as of version 0.8 -# https://github.com/include-what-you-use/include-what-you-use/blob/master/boost-all.imp -# and corresponding license has been added: -# https://github.com/include-what-you-use/include-what-you-use/blob/master/LICENSE.TXT -# -# ============================================================================== -# LLVM Release License -# ============================================================================== -# University of Illinois/NCSA -# Open Source License -# -# Copyright (c) 2003-2010 University of Illinois at Urbana-Champaign. -# All rights reserved. -# -# Developed by: -# -# LLVM Team -# -# University of Illinois at Urbana-Champaign -# -# http://llvm.org -# -# Permission is hereby granted, free of charge, to any person obtaining a copy of -# this software and associated documentation files (the "Software"), to deal with -# the Software without restriction, including without limitation the rights to -# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies -# of the Software, and to permit persons to whom the Software is furnished to do -# so, subject to the following conditions: -# -# * Redistributions of source code must retain the above copyright notice, -# this list of conditions and the following disclaimers. -# -# * Redistributions in binary form must reproduce the above copyright notice, -# this list of conditions and the following disclaimers in the -# documentation and/or other materials provided with the distribution. -# -# * Neither the names of the LLVM Team, University of Illinois at -# Urbana-Champaign, nor the names of its contributors may be used to -# endorse or promote products derived from this Software without specific -# prior written permission. 
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
-# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# CONTRIBUTORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS WITH THE
-# SOFTWARE.
-
-[
-# cd /usr/include && grep -r --exclude-dir={detail,impl} '^ *# *include' boost/ | perl -nle 'm/^([^:]+).*["<]([^>]+)[">]/ && print qq@ { include: ["<$2>", private, "<$1>", public ] },@' | grep -e \/detail\/ -e \/impl\/ | grep -e \ [... rest of the command lost in extraction]
-{ include: ["@[...]", private, "[...]", public ] },
-{ include: ["@[...]", private, "[...]", public ] },
-{ include: ["@[...]", private, "[...]", public ] },
-#manually delete $ sed '/workarounds*\.hpp/d' -i boost-all.imp
-#also good idea to remove all lines referring to folders above (e.g., sed '/\/format\//d' -i boost-all.imp)
-#programmatically include:
-  [~5,600 generated mapping entries of the form { include: ["<...>", private, "<...>", public ] } followed; the angle-bracketed boost header paths were lost in extraction]
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: 
["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] 
}, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", 
private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { 
include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", public ] }, - { include: ["", private, "", 
public ] },
-  [several hundred further IWYU include mappings followed here, all of the form shown; the angle-bracketed header names inside each entry are missing from this copy of the patch]
-]
diff --git a/cpp/build-support/iwyu/mappings/boost-extra.imp b/cpp/build-support/iwyu/mappings/boost-extra.imp
deleted file mode 100644
index aba1e419168..00000000000
--- a/cpp/build-support/iwyu/mappings/boost-extra.imp
+++ /dev/null
@@ -1,23 +0,0 @@
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-[
-  { include: ["", private, "", public ] },
-  { include: ["", private, "", public ] },
-  { include: ["", private, "", public ] },
-  { include: ["", private, "", public ] },
-  { include: ["", private, "", public ] }
-]
diff --git a/cpp/build-support/iwyu/mappings/gflags.imp b/cpp/build-support/iwyu/mappings/gflags.imp
deleted file mode 100644
index 46ce63d1e71..00000000000
--- a/cpp/build-support/iwyu/mappings/gflags.imp
+++ /dev/null
@@ -1,20 +0,0 @@
-# [ASF license header elided; identical to the one reproduced in full above]
-[
-  # confuses the IWYU tool because of the 'using '
-  { symbol: [ "fLS::clstring", private, "", public ] }
-]
diff --git a/cpp/build-support/iwyu/mappings/glog.imp b/cpp/build-support/iwyu/mappings/glog.imp
deleted file mode 100644
index 08c5e3529bc..00000000000
--- a/cpp/build-support/iwyu/mappings/glog.imp
+++ /dev/null
@@ -1,27 +0,0 @@
-# [ASF license header elided; identical to the one reproduced in full above]
-[
-  { symbol: [ "LOG", private, "", public ] },
-  { symbol: [ "VLOG", private, "", public ] },
-  { symbol: [ "CHECK_EQ", private, "", public ] },
-  { symbol: [ "CHECK_NE", private, "", public ] },
-  { symbol: [ "CHECK_LT", private, "", public ] },
-  { symbol: [ "CHECK_GE", private, "", public ] },
-  { symbol: [ "CHECK_GT", private, "", public ] },
-  { symbol: [ "ErrnoLogMessage", private, "", public ] },
-  { symbol: [ "COMPACT_GOOGLE_LOG_0", private, "", public ] }
-]
diff --git a/cpp/build-support/iwyu/mappings/gmock.imp b/cpp/build-support/iwyu/mappings/gmock.imp
deleted file mode 100644
index 76e7cafddde..00000000000
--- a/cpp/build-support/iwyu/mappings/gmock.imp
+++ /dev/null
@@ -1,23 +0,0 @@
-# [ASF license header elided; identical to the one reproduced in full above]
-#include
-#include
-
-[
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] }
-]
\ No newline at end of file
diff --git a/cpp/build-support/iwyu/mappings/gtest.imp b/cpp/build-support/iwyu/mappings/gtest.imp
deleted file mode 100644
index a54165027e7..00000000000
--- a/cpp/build-support/iwyu/mappings/gtest.imp
+++ /dev/null
@@ -1,26 +0,0 @@
-# [ASF license header elided; identical to the one reproduced in full above]
-[
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] },
-  { include: [ "", private, "", public ] }
-]
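For orientation, IWYU consumes mapping files like the ones deleted above through its --mapping_file option. A minimal sketch of such an invocation, assuming include-what-you-use is installed and with an illustrative source path:

    # Hedged sketch: pass one of the deleted .imp files to IWYU.
    # The source file path below is illustrative only.
    import subprocess

    subprocess.run(
        ["include-what-you-use",
         "-Xiwyu", "--mapping_file=cpp/build-support/iwyu/mappings/gtest.imp",
         "cpp/src/arrow/example.cc"],
        check=False)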
diff --git a/cpp/build-support/lint_cpp_cli.py b/cpp/build-support/lint_cpp_cli.py
deleted file mode 100755
index 403964b7817..00000000000
--- a/cpp/build-support/lint_cpp_cli.py
+++ /dev/null
@@ -1,130 +0,0 @@
-#!/usr/bin/env python3
-# [ASF license header elided; identical to the one reproduced in full above]
-
-import argparse
-import re
-import os
-
-parser = argparse.ArgumentParser(
-    description="Check for illegal headers for C++/CLI applications")
-parser.add_argument("source_path",
-                    help="Path to source code")
-arguments = parser.parse_args()
-
-
-_STRIP_COMMENT_REGEX = re.compile('(.+)?(?=//)')
-_NULLPTR_REGEX = re.compile(r'.*\bnullptr\b.*')
-_RETURN_NOT_OK_REGEX = re.compile(r'.*\sRETURN_NOT_OK.*')
-_ASSIGN_OR_RAISE_REGEX = re.compile(r'.*\sASSIGN_OR_RAISE.*')
-_DCHECK_REGEX = re.compile(r'.*\sDCHECK.*')
-
-
-def _paths(paths):
-    return [p.strip().replace('/', os.path.sep) for p in paths.splitlines()]
-
-
-def _strip_comments(line):
-    m = _STRIP_COMMENT_REGEX.match(line)
-    if not m:
-        return line
-    else:
-        return m.group(0)
-
-
-def lint_file(path):
-    fail_rules = [
-        # rule, error message, rule-specific exclusions list
-        (lambda x: '' in x, 'Uses ', []),
-        (lambda x: '' in x, 'Uses ', []),
-        (lambda x: re.match(_NULLPTR_REGEX, x), 'Uses nullptr', []),
-        (lambda x: re.match(_RETURN_NOT_OK_REGEX, x),
-         'Use ARROW_RETURN_NOT_OK in header files', _paths('''\
-             arrow/status.h
-             arrow/python/util''')),
-        (lambda x: re.match(_ASSIGN_OR_RAISE_REGEX, x),
-         'Use ARROW_ASSIGN_OR_RAISE in header files', []),
-        (lambda x: re.match(_DCHECK_REGEX, x),
-         'Use ARROW_DCHECK in header files', _paths('''\
-             arrow/util/logging.h'''))
-    ]
-
-    with open(path) as f:
-        for i, line in enumerate(f):
-            stripped_line = _strip_comments(line)
-            for rule, why, rule_exclusions in fail_rules:
-                if any([True for excl in rule_exclusions if excl in path]):
-                    continue
-
-                if rule(stripped_line):
-                    yield path, why, i, line
-
-
-EXCLUSIONS = _paths('''\
-    arrow/arrow-config.cmake
-    arrow/flight/sql/odbc/flight_sql/get_info_cache.h
-    arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/blocking_queue.h
-    arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/odbc_impl/odbc_handle.h
-    arrow/python/iterators.h
-    arrow/util/hashing.h
-    arrow/util/macros.h
-    arrow/util/parallel.h
-    arrow/vendored
-    arrow/visitor_inline.h
-    gandiva/cache.h
-    gandiva/jni
-    jni/
-    test
-    internal
-    _generated''')
-
-
-def lint_files():
-    for dirpath, _, filenames in os.walk(arguments.source_path):
-        for filename in filenames:
-            full_path = os.path.join(dirpath, filename)
-
-            exclude = False
-            for exclusion in EXCLUSIONS:
-                if exclusion in full_path:
-                    exclude = True
-                    break
-
-            if exclude:
-                continue
-
-            # Lint file name, except for pkg-config templates
-            if not filename.endswith('.pc.in'):
-                if '-' in filename:
-                    why = ("Please use underscores, not hyphens, "
-                           "in source file names")
-                    yield full_path, why, 0, full_path
-
-            # Only run on header files
-            if filename.endswith('.h'):
-                for _ in lint_file(full_path):
-                    yield _
-
-
-if __name__ == '__main__':
-    failures = list(lint_files())
-    for path, why, i, line in failures:
-        print(f'File {path} failed C++/CLI lint check: {why}\n'
-              f'Line {i + 1}: {line}')
-    if failures:
-        exit(1)
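The deleted lint_cpp_cli.py drives everything through (predicate, message, exclusions) triples applied to each comment-stripped line. A self-contained illustration of that pattern; the single rule shown mirrors the DCHECK rule above, and the input line is invented for the example:

    # Minimal sketch of the rule-check pattern used by the deleted
    # lint_cpp_cli.py: each rule pairs a line predicate with a message.
    import re

    _DCHECK_REGEX = re.compile(r'.*\sDCHECK.*')

    fail_rules = [
        (lambda x: re.match(_DCHECK_REGEX, x),
         'Use ARROW_DCHECK in header files'),
    ]

    line = '  DCHECK(value != nullptr);'  # invented input line
    for rule, why in fail_rules:
        if rule(line):
            print(why)  # -> Use ARROW_DCHECK in header files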
diff --git a/cpp/build-support/lint_exclusions.txt b/cpp/build-support/lint_exclusions.txt
deleted file mode 100644
index aa57db72ce9..00000000000
--- a/cpp/build-support/lint_exclusions.txt
+++ /dev/null
@@ -1,13 +0,0 @@
-*.grpc.fb.*
-*.pb.*
-*RcppExports.cpp*
-*_generated*
-*arrowExports.cpp*
-*parquet_types.*
-*pyarrow_api.h
-*pyarrow_lib.h
-*python/config.h
-*python/platform.h
-*thirdparty/*
-*vendored/*
-*windows_compatibility.h
diff --git a/cpp/build-support/lintutils.py b/cpp/build-support/lintutils.py
deleted file mode 100644
index 2386eb2e6af..00000000000
--- a/cpp/build-support/lintutils.py
+++ /dev/null
@@ -1,109 +0,0 @@
-# [ASF license header elided; identical to the one reproduced in full above]
-
-import multiprocessing as mp
-import os
-from fnmatch import fnmatch
-from subprocess import Popen
-
-
-def chunk(seq, n):
-    """
-    divide a sequence into equal sized chunks
-    (the last chunk may be smaller, but won't be empty)
-    """
-    chunks = []
-    some = []
-    for element in seq:
-        if len(some) == n:
-            chunks.append(some)
-            some = []
-        some.append(element)
-    if len(some) > 0:
-        chunks.append(some)
-    return chunks
-
-
-def dechunk(chunks):
-    "flatten chunks into a single list"
-    seq = []
-    for chunk in chunks:
-        seq.extend(chunk)
-    return seq
-
-
-def run_parallel(cmds, **kwargs):
-    """
-    Run each of cmds (with shared **kwargs) using subprocess.Popen
-    then wait for all of them to complete.
-    Runs batches of multiprocessing.cpu_count() * 2 from cmds
-    returns a list of tuples containing each process'
-    returncode, stdout, stderr
-    """
-    complete = []
-    for cmds_batch in chunk(cmds, mp.cpu_count() * 2):
-        procs_batch = [Popen(cmd, **kwargs) for cmd in cmds_batch]
-        for proc in procs_batch:
-            stdout, stderr = proc.communicate()
-            complete.append((proc.returncode, stdout, stderr))
-    return complete
-
-
-_source_extensions = '''
-.h
-.cc
-.cpp
-'''.split()
-
-
-def get_sources(source_dir, exclude_globs=[]):
-    sources = []
-    for directory, subdirs, basenames in os.walk(source_dir):
-        for path in [os.path.join(directory, basename)
-                     for basename in basenames]:
-            # filter out non-source files
-            if os.path.splitext(path)[1] not in _source_extensions:
-                continue
-
-            path = os.path.abspath(path)
-
-            # filter out files that match the globs in the globs file
-            if any([fnmatch(path, glob) for glob in exclude_globs]):
-                continue
-
-            sources.append(path)
-    return sources
-
-
-def stdout_pathcolonline(completed_process, filenames):
-    """
-    given a completed process which may have reported some files as problematic
-    by printing the path name followed by ':' then a line number, examine
-    stdout and return the set of actually reported file names
-    """
-    returncode, stdout, stderr = completed_process
-    bfilenames = set()
-    for filename in filenames:
-        bfilenames.add(filename.encode('utf-8') + b':')
-    problem_files = set()
-    for line in stdout.splitlines():
-        for filename in bfilenames:
-            if line.startswith(filename):
-                problem_files.add(filename.decode('utf-8'))
-                bfilenames.remove(filename)
-                break
-    return problem_files, stdout
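The two helpers above carry all of the parallelism in the scripts that follow: chunk() splits a file list into fixed-size batches and run_parallel() runs one subprocess per batch, collecting (returncode, stdout, stderr) tuples. A usage sketch, assuming cpp/build-support is on sys.path; the command here just echoes its batch instead of invoking a real linter:

    import sys
    import lintutils  # assumes cpp/build-support is on sys.path

    files = ["a.cc", "b.cc", "c.cc", "d.cc", "e.cc"]
    batches = lintutils.chunk(files, 2)
    # -> [['a.cc', 'b.cc'], ['c.cc', 'd.cc'], ['e.cc']]

    results = lintutils.run_parallel(
        [[sys.executable, "-c", "import sys; print(sys.argv[1:])"] + batch
         for batch in batches])
    for returncode, stdout, stderr in results:
        assert returncode == 0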
diff --git a/cpp/build-support/run-infer.sh b/cpp/build-support/run-infer.sh
deleted file mode 100755
index 7d185343706..00000000000
--- a/cpp/build-support/run-infer.sh
+++ /dev/null
@@ -1,48 +0,0 @@
-#!/usr/bin/env bash
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#   http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-#
-# Runs infer in the given directory
-# Arguments:
-#   $1 - Path to the infer binary
-#   $2 - Path to the compile_commands.json to use
-#   $3 - Apply infer step (1=capture, 2=analyze, 3=report)
-#
-INFER=$1
-shift
-COMPILE_COMMANDS=$1
-shift
-APPLY_STEP=$1
-shift
-
-if [ "$APPLY_STEP" == "1" ]; then
-  $INFER capture --compilation-database $COMPILE_COMMANDS
-  echo ""
-  echo "Run 'make infer-analyze' next."
-elif [ "$APPLY_STEP" == "2" ]; then
-  # infer's analyze step can take a very long time to complete
-  $INFER analyze
-  echo ""
-  echo "Run 'make infer-report' next."
-  echo "See: http://fbinfer.com/docs/steps-for-ci.html"
-elif [ "$APPLY_STEP" == "3" ]; then
-  $INFER report --issues-csv ./infer-out/report.csv 1> /dev/null
-  $INFER report --issues-txt ./infer-out/report.txt 1> /dev/null
-  $INFER report --issues-json ./infer-out/report.json 1> /dev/null
-  echo ""
-  echo "Reports (report.txt, report.csv, report.json) can be found in the infer-out subdirectory."
-else
-  echo ""
-  echo "See: http://fbinfer.com/docs/steps-for-ci.html"
-fi
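The deleted run-infer.sh is a thin dispatcher over the three infer steps its own comments name (capture, analyze, report). The equivalent sequence expressed in Python, assuming infer is installed and with illustrative paths:

    # capture -> analyze -> report, mirroring the deleted script's
    # three $APPLY_STEP branches. Paths are illustrative.
    import subprocess

    subprocess.run(["infer", "capture",
                    "--compilation-database", "compile_commands.json"],
                   check=True)
    subprocess.run(["infer", "analyze"], check=True)  # can take a long time
    subprocess.run(["infer", "report",
                    "--issues-txt", "./infer-out/report.txt"], check=True)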
diff --git a/cpp/build-support/run_clang_format.py b/cpp/build-support/run_clang_format.py
deleted file mode 100755
index 35b16f73ec0..00000000000
--- a/cpp/build-support/run_clang_format.py
+++ /dev/null
@@ -1,137 +0,0 @@
-#!/usr/bin/env python3
-# [ASF license header elided; identical to the one reproduced in full above]
-
-from __future__ import print_function
-import lintutils
-from subprocess import PIPE
-import argparse
-import difflib
-import multiprocessing as mp
-import sys
-from functools import partial
-
-
-# examine the output of clang-format and if changes are
-# present assemble a (unified)patch of the difference
-def _check_one_file(filename, formatted):
-    with open(filename, "rb") as reader:
-        original = reader.read()
-
-    if formatted != original:
-        # Run the equivalent of diff -u
-        diff = list(difflib.unified_diff(
-            original.decode('utf8').splitlines(True),
-            formatted.decode('utf8').splitlines(True),
-            fromfile=filename,
-            tofile=f"{filename} (after clang format)"))
-    else:
-        diff = None
-
-    return filename, diff
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(
-        description="Runs clang-format on all of the source "
-        "files. If --fix is specified enforce format by "
-        "modifying in place, otherwise compare the output "
-        "with the existing file and output any necessary "
-        "changes as a patch in unified diff format")
-    parser.add_argument("--clang_format_binary",
-                        required=True,
-                        help="Path to the clang-format binary")
-    parser.add_argument("--exclude_globs",
-                        help="Filename containing globs for files "
-                        "that should be excluded from the checks")
-    parser.add_argument("--source_dir",
-                        required=True,
-                        action="append",
-                        help="Root directory of the source code")
-    parser.add_argument("--fix", default=False,
-                        action="store_true",
-                        help="If specified, will re-format the source "
-                        "code instead of comparing the re-formatted "
-                        "output, defaults to %(default)s")
-    parser.add_argument("--quiet", default=False,
-                        action="store_true",
-                        help="If specified, only print errors")
-    arguments = parser.parse_args()
-
-    exclude_globs = []
-    if arguments.exclude_globs:
-        with open(arguments.exclude_globs) as f:
-            exclude_globs.extend(line.strip() for line in f)
-
-    formatted_filenames = []
-    for source_dir in arguments.source_dir:
-        for path in lintutils.get_sources(source_dir, exclude_globs):
-            formatted_filenames.append(str(path))
-
-    if arguments.fix:
-        if not arguments.quiet:
-            print("\n".join(f"Formatting {x}" for x in formatted_filenames))
-
-        # Break clang-format invocations into chunks: each invocation formats
-        # 16 files. Wait for all processes to complete
-        results = lintutils.run_parallel([
-            [arguments.clang_format_binary, "-i"] + some
-            for some in lintutils.chunk(formatted_filenames, 16)
-        ])
-        for returncode, stdout, stderr in results:
-            # if any clang-format reported a parse error, bubble it
-            if returncode != 0:
-                sys.exit(returncode)
-
-    else:
-        # run an instance of clang-format for each source file in parallel,
-        # then wait for all processes to complete
-        results = lintutils.run_parallel([
-            [arguments.clang_format_binary, filename]
-            for filename in formatted_filenames
-        ], stdout=PIPE, stderr=PIPE)
-
-        checker_args = []
-        for filename, res in zip(formatted_filenames, results):
-            # if any clang-format reported a parse error, bubble it
-            returncode, stdout, stderr = res
-            if returncode != 0:
-                print(stderr)
-                sys.exit(returncode)
-            checker_args.append((filename, stdout))
-
-        error = False
-        pool = mp.Pool()
-        try:
-            # check the output from each invocation of clang-format in parallel
-            for filename, diff in pool.starmap(_check_one_file, checker_args):
-                if not arguments.quiet:
-                    print(f"Checking {filename}")
-                if diff:
-                    print(f"{filename} had clang-format style issues")
-                    # Print out the diff to stderr
-                    error = True
-                    # pad with a newline
-                    print(file=sys.stderr)
-                    sys.stderr.writelines(diff)
-        except Exception:
-            error = True
-            raise
-        finally:
-            pool.terminate()
-            pool.join()
-        sys.exit(1 if error else 0)
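The check mode of the script above never touches files on disk: it recomputes the formatted bytes and diffs them against the original. A standalone reduction of that check, assuming clang-format is on PATH and with an illustrative file path:

    # Run clang-format, then diff its output against the file on disk,
    # exactly the comparison _check_one_file() performed.
    import difflib
    import subprocess

    filename = "example.cc"  # illustrative
    formatted = subprocess.run(["clang-format", filename],
                               capture_output=True, check=True).stdout
    with open(filename, "rb") as reader:
        original = reader.read()
    if formatted != original:
        diff = difflib.unified_diff(
            original.decode("utf8").splitlines(True),
            formatted.decode("utf8").splitlines(True),
            fromfile=filename,
            tofile=f"{filename} (after clang format)")
        print("".join(diff))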
diff --git a/cpp/build-support/run_clang_tidy.py b/cpp/build-support/run_clang_tidy.py
deleted file mode 100755
index 863c5bd70ab..00000000000
--- a/cpp/build-support/run_clang_tidy.py
+++ /dev/null
@@ -1,126 +0,0 @@
-#!/usr/bin/env python3
-# [ASF license header elided; identical to the one reproduced in full above]
-
-from __future__ import print_function
-import argparse
-import multiprocessing as mp
-import lintutils
-from subprocess import PIPE
-import sys
-from functools import partial
-
-
-def _get_chunk_key(filenames):
-    # lists are not hashable so key on the first filename in a chunk
-    return filenames[0]
-
-
-# clang-tidy outputs complaints in '/path:line_number: complaint' format,
-# so we can scan its output to get a list of files to fix
-def _check_some_files(completed_processes, filenames):
-    result = completed_processes[_get_chunk_key(filenames)]
-    return lintutils.stdout_pathcolonline(result, filenames)
-
-
-def _check_all(cmd, filenames):
-    # each clang-tidy instance will process 16 files
-    chunks = lintutils.chunk(filenames, 16)
-    cmds = [cmd + some for some in chunks]
-    results = lintutils.run_parallel(cmds, stderr=PIPE, stdout=PIPE)
-    error = False
-    # record completed processes (keyed by the first filename in the input
-    # chunk) for lookup in _check_some_files
-    completed_processes = {
-        _get_chunk_key(some): result
-        for some, result in zip(chunks, results)
-    }
-    checker = partial(_check_some_files, completed_processes)
-    pool = mp.Pool()
-    try:
-        # check output of completed clang-tidy invocations in parallel
-        for problem_files, stdout in pool.imap(checker, chunks):
-            if problem_files:
-                msg = "clang-tidy suggested fixes for {}"
-                print("\n".join(map(msg.format, problem_files)))
-                error = True
-    except Exception:
-        error = True
-        raise
-    finally:
-        pool.terminate()
-        pool.join()
-
-    if error:
-        sys.exit(1)
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(
-        description="Runs clang-tidy on all ")
-    parser.add_argument("--clang_tidy_binary",
-                        required=True,
-                        help="Path to the clang-tidy binary")
-    parser.add_argument("--exclude_globs",
-                        help="Filename containing globs for files "
-                        "that should be excluded from the checks")
-    parser.add_argument("--compile_commands",
-                        required=True,
-                        help="compile_commands.json to pass clang-tidy")
-    parser.add_argument("--source_dir",
-                        required=True,
-                        action="append",
-                        help="Root directory of the source code")
-    parser.add_argument("--fix", default=False,
-                        action="store_true",
-                        help="If specified, will attempt to fix the "
-                        "source code instead of recommending fixes, "
-                        "defaults to %(default)s")
-    parser.add_argument("--quiet", default=False,
-                        action="store_true",
-                        help="If specified, only print errors")
-    arguments = parser.parse_args()
-
-    exclude_globs = []
-    if arguments.exclude_globs:
-        for line in open(arguments.exclude_globs):
-            exclude_globs.append(line.strip())
-
-    linted_filenames = []
-    for source_dir in arguments.source_dir:
-        for path in lintutils.get_sources(source_dir, exclude_globs):
-            linted_filenames.append(path)
-
-    if not arguments.quiet:
-        msg = 'Tidying {}' if arguments.fix else 'Checking {}'
-        print("\n".join(map(msg.format, linted_filenames)))
-
-    cmd = [
-        arguments.clang_tidy_binary,
-        '-p',
-        arguments.compile_commands
-    ]
-    if arguments.fix:
-        cmd.append('-fix')
-        results = lintutils.run_parallel(
-            [cmd + some for some in lintutils.chunk(linted_filenames, 16)])
-        for returncode, stdout, stderr in results:
-            if returncode != 0:
-                sys.exit(returncode)
-
-    else:
-        _check_all(cmd, linted_filenames)
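Both this wrapper and the cpplint wrapper that follows reduce tool output to a set of offending files with lintutils.stdout_pathcolonline(), which scans stdout for lines beginning with a known path followed by a colon. The core of that scan, standalone; the output bytes below are invented for illustration:

    # Find which linted files are named with a "path:" prefix in tool output.
    stdout = b"src/foo.cc:10: warning ...\nsrc/bar.cc:3: warning ..."
    filenames = ["src/foo.cc", "src/baz.cc"]

    problem_files = set()
    for line in stdout.splitlines():
        for name in filenames:
            if line.startswith(name.encode("utf-8") + b":"):
                problem_files.add(name)
    print(problem_files)  # -> {'src/foo.cc'}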
diff --git a/cpp/build-support/run_cpplint.py b/cpp/build-support/run_cpplint.py
deleted file mode 100755
index ab8bb0d3166..00000000000
--- a/cpp/build-support/run_cpplint.py
+++ /dev/null
@@ -1,114 +0,0 @@
-#!/usr/bin/env python3
-# [ASF license header elided; identical to the one reproduced in full above]
-
-from __future__ import print_function
-import lintutils
-from subprocess import PIPE, STDOUT
-import argparse
-import multiprocessing as mp
-import sys
-import platform
-from functools import partial
-
-
-def _get_chunk_key(filenames):
-    # lists are not hashable so key on the first filename in a chunk
-    return filenames[0]
-
-
-def _check_some_files(completed_processes, filenames):
-    # cpplint outputs complaints in '/path:line_number: complaint' format,
-    # so we can scan its output to get a list of files to fix
-    result = completed_processes[_get_chunk_key(filenames)]
-    return lintutils.stdout_pathcolonline(result, filenames)
-
-
-if __name__ == "__main__":
-    parser = argparse.ArgumentParser(
-        description="Runs cpplint on all of the source files.")
-    parser.add_argument("--cpplint_binary",
-                        required=True,
-                        help="Path to the cpplint binary")
-    parser.add_argument("--exclude_globs",
-                        help="Filename containing globs for files "
-                        "that should be excluded from the checks")
-    parser.add_argument("--source_dir",
-                        required=True,
-                        action="append",
-                        help="Root directory of the source code")
-    parser.add_argument("--quiet", default=False,
-                        action="store_true",
-                        help="If specified, only print errors")
-    arguments = parser.parse_args()
-
-    exclude_globs = []
-    if arguments.exclude_globs:
-        with open(arguments.exclude_globs) as f:
-            exclude_globs.extend(line.strip() for line in f)
-
-    linted_filenames = []
-    for source_dir in arguments.source_dir:
-        for path in lintutils.get_sources(source_dir, exclude_globs):
-            linted_filenames.append(str(path))
-
-    cmd = [
-        arguments.cpplint_binary,
-        '--verbose=2',
-    ]
-    if (arguments.cpplint_binary.endswith('.py') and
-            platform.system() == 'Windows'):
-        # Windows doesn't support executable scripts; execute with
-        # sys.executable
-        cmd.insert(0, sys.executable)
-    if arguments.quiet:
-        cmd.append('--quiet')
-    else:
-        print("\n".join(map(lambda x: f"Linting {x}",
-                            linted_filenames)))
-
-    # lint files in chunks: each invocation of cpplint will process 16 files
-    chunks = lintutils.chunk(linted_filenames, 16)
-    cmds = [cmd + some for some in chunks]
-    results = lintutils.run_parallel(cmds, stdout=PIPE, stderr=STDOUT)
-
-    error = False
-    # record completed processes (keyed by the first filename in the input
-    # chunk) for lookup in _check_some_files
-    completed_processes = {
-        _get_chunk_key(filenames): result
-        for filenames, result in zip(chunks, results)
-    }
-    checker = partial(_check_some_files, completed_processes)
-    pool = mp.Pool()
-    try:
-        # scan the outputs of various cpplint invocations in parallel to
-        # distill a list of problematic files
-        for problem_files, stdout in pool.imap(checker, chunks):
-            if problem_files:
-                if isinstance(stdout, bytes):
-                    stdout = stdout.decode('utf8')
-                print(stdout, file=sys.stderr)
-                error = True
-    except Exception:
-        error = True
-        raise
-    finally:
-        pool.terminate()
-        pool.join()
-
-    sys.exit(1 if error else 0)
diff --git a/cpp/cmake_modules/DefineOptions.cmake b/cpp/cmake_modules/DefineOptions.cmake
index 05957d4b275..80817036481 100644
--- a/cpp/cmake_modules/DefineOptions.cmake
+++ b/cpp/cmake_modules/DefineOptions.cmake
@@ -269,12 +269,7 @@ takes precedence over ccache if a storage backend is configured" ON)
   define_option(ARROW_LARGE_MEMORY_TESTS "Enable unit tests which use large memory" OFF)
 
   #----------------------------------------------------------------------
-  set_option_category("Lint")
-
-  define_option(ARROW_ONLY_LINT "Only define the lint and check-format targets" OFF)
-
-  define_option(ARROW_VERBOSE_LINT
-                "If off, 'quiet' flags will be passed to linting tools" OFF)
+  set_option_category("Coverage")
 
   define_option(ARROW_GENERATE_COVERAGE "Build with C++ code coverage enabled" OFF)
- """ - for check in lint_checks: - option = click.option(f"--{check.option_name}/--no-{check.option_name}", - default=None, help=check.help) - cmd = option(cmd) - return cmd - - -@archery.command(short_help="Check Arrow source tree for errors") -@click.option("--src", metavar="", default=None, - callback=validate_arrow_sources, - help="Specify Arrow source directory") -@click.option("--fix", is_flag=True, type=BOOL, default=False, - help="Toggle fixing the lint errors if the linter supports it.") -@click.option("--iwyu_all", is_flag=True, type=BOOL, default=False, - help="Run IWYU on all C++ files if enabled") -@click.option("-a", "--all", is_flag=True, default=False, - help="Enable all checks.") -@click.argument("path", required=False) -@decorate_lint_command -@click.pass_context -def lint(ctx, src, fix, iwyu_all, path, **checks): - if checks.pop('all'): - # "--all" is given => enable all non-selected checks - for k, v in checks.items(): - if v is None: - checks[k] = True - if not any(checks.values()): - raise click.UsageError( - "Need to enable at least one lint check (try --help)") - try: - linter(src, fix, iwyu_all=iwyu_all, path=path, **checks) - except LintValidationException: - sys.exit(1) - - def _flatten_numpydoc_rules(rules): flattened = [] for rule in rules: diff --git a/dev/archery/archery/lang/cpp.py b/dev/archery/archery/lang/cpp.py index 3799a5b4967..1527938087d 100644 --- a/dev/archery/archery/lang/cpp.py +++ b/dev/archery/archery/lang/cpp.py @@ -62,7 +62,6 @@ def __init__(self, with_brotli=None, with_bz2=None, with_lz4=None, with_snappy=None, with_zlib=None, with_zstd=None, # extras - with_lint_only=False, use_gold_linker=True, simd_level="DEFAULT", cmake_extras=None): @@ -115,7 +114,6 @@ def __init__(self, self.with_zlib = with_zlib self.with_zstd = with_zstd - self.with_lint_only = with_lint_only self.use_gold_linker = use_gold_linker self.simd_level = simd_level @@ -184,10 +182,8 @@ def _gen_defs(self): yield ("CMAKE_EXPORT_COMPILE_COMMANDS", truthifier(True)) yield ("CMAKE_BUILD_TYPE", self.build_type) - - if not self.with_lint_only: - yield ("BUILD_WARNING_LEVEL", - or_else(self.warn_level, "production")) + yield ("BUILD_WARNING_LEVEL", + or_else(self.warn_level, "production")) # if not ctx.quiet: # yield ("ARROW_VERBOSE_THIRDPARTY_BUILD", "ON") @@ -241,8 +237,6 @@ def _gen_defs(self): yield ("ARROW_WITH_ZLIB", truthifier(self.with_zlib)) yield ("ARROW_WITH_ZSTD", truthifier(self.with_zstd)) - yield ("ARROW_LINT_ONLY", truthifier(self.with_lint_only)) - # Some configurations don't like gnu gold linker. 
diff --git a/dev/archery/archery/lang/cpp.py b/dev/archery/archery/lang/cpp.py
index 3799a5b4967..1527938087d 100644
--- a/dev/archery/archery/lang/cpp.py
+++ b/dev/archery/archery/lang/cpp.py
@@ -62,7 +62,6 @@ def __init__(self,
                  with_brotli=None, with_bz2=None, with_lz4=None,
                  with_snappy=None, with_zlib=None, with_zstd=None,
                  # extras
-                 with_lint_only=False,
                  use_gold_linker=True,
                  simd_level="DEFAULT",
                  cmake_extras=None):
@@ -115,7 +114,6 @@ def __init__(self,
         self.with_zlib = with_zlib
         self.with_zstd = with_zstd
 
-        self.with_lint_only = with_lint_only
         self.use_gold_linker = use_gold_linker
         self.simd_level = simd_level
 
@@ -184,10 +182,8 @@ def _gen_defs(self):
         yield ("CMAKE_EXPORT_COMPILE_COMMANDS", truthifier(True))
         yield ("CMAKE_BUILD_TYPE", self.build_type)
-
-        if not self.with_lint_only:
-            yield ("BUILD_WARNING_LEVEL",
-                   or_else(self.warn_level, "production"))
+        yield ("BUILD_WARNING_LEVEL",
+               or_else(self.warn_level, "production"))
 
         # if not ctx.quiet:
         #     yield ("ARROW_VERBOSE_THIRDPARTY_BUILD", "ON")
@@ -241,8 +237,6 @@ def _gen_defs(self):
         yield ("ARROW_WITH_ZLIB", truthifier(self.with_zlib))
         yield ("ARROW_WITH_ZSTD", truthifier(self.with_zstd))
 
-        yield ("ARROW_LINT_ONLY", truthifier(self.with_lint_only))
-
         # Some configurations don't like gnu gold linker.
         broken_with_gold_ld = [self.with_fuzzing, self.with_gandiva]
         if self.use_gold_linker and not any(broken_with_gold_ld):
diff --git a/dev/archery/archery/lang/python.py b/dev/archery/archery/lang/python.py
index 56e1ff6f827..f73a2ce44b4 100644
--- a/dev/archery/archery/lang/python.py
+++ b/dev/archery/archery/lang/python.py
@@ -29,31 +29,6 @@
 
 from ..compat import _get_module
 from ..utils.logger import logger
-from ..utils.command import Command, capture_stdout, default_bin
-
-
-class PythonCommand(Command):
-    def __init__(self, python_bin=None):
-        self.bin = default_bin(python_bin, "python")
-
-
-class Flake8(Command):
-    def __init__(self, flake8_bin=None):
-        self.bin = default_bin(flake8_bin, "flake8")
-
-
-class CythonLint(Command):
-    def __init__(self, cython_lint_bin=None):
-        self.bin = default_bin(cython_lint_bin, "cython-lint")
-
-
-class Autopep8(Command):
-    def __init__(self, autopep8_bin=None):
-        self.bin = default_bin(autopep8_bin, "autopep8")
-
-    @capture_stdout()
-    def run_captured(self, *args, **kwargs):
-        return self.run(*args, **kwargs)
 
 
 def _tokenize_signature(s):
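The deleted classes were thin wrappers that resolve a tool binary once and then shell out to it. A standalone sketch of that wrapper idiom using only the standard library, since archery's Command and default_bin helpers are assumed away here:

    # Resolve the binary once, then delegate every call to subprocess.
    import shutil
    import subprocess

    class ToolCommand:
        def run(self, *args, check=True):
            return subprocess.run([self.bin, *args], check=check)

    class Flake8(ToolCommand):
        def __init__(self, flake8_bin=None):
            # fall back to whatever "flake8" resolves to on PATH
            self.bin = flake8_bin or shutil.which("flake8") or "flake8"

    Flake8().run("--version", check=False)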
- - # ARROW_LINT_ONLY exits early but ignore building compile_command.json - lint_only = not (iwyu or clang_tidy) - cmake_args = {"with_python": False, "with_lint_only": lint_only} - cmake_def = CppCMakeDefinition(src.cpp, CppConfiguration(**cmake_args)) - - build = cmake_def.build(build_dir) - if clang_format: - target = "format" if fix else "check-format" - yield LintResult.from_cmd(build.run(target, check=False)) - - if cpplint: - yield LintResult.from_cmd(build.run("lint", check=False)) - yield LintResult.from_cmd(build.run("lint_cpp_cli", check=False)) - - if clang_tidy: - yield LintResult.from_cmd(build.run("check-clang-tidy", check=False)) - - if iwyu: - if iwyu_all: - iwyu_cmd = "iwyu-all" - else: - iwyu_cmd = "iwyu" - yield LintResult.from_cmd(build.run(iwyu_cmd, check=False)) - - -class CMakeFormat(Command): - - def __init__(self, paths, cmake_format_bin=None): - self.check_version() - self.bin = default_bin(cmake_format_bin, "cmake-format") - self.paths = paths - - @classmethod - def from_patterns(cls, base_path, include_patterns, exclude_patterns): - paths = { - str(path.as_posix()) - for pattern in include_patterns - for path in base_path.glob(pattern) - } - for pattern in exclude_patterns: - pattern = (base_path / pattern).as_posix() - paths -= set(fnmatch.filter(paths, str(pattern))) - return cls(paths) - - @staticmethod - def check_version(): - try: - # cmake_format is part of the cmakelang package - import cmakelang - except ImportError: - raise ImportError( - - ) - # pin a specific version of cmake_format, must be updated in setup.py - if cmakelang.__version__ != "0.6.13": - raise LintValidationException( - f"Wrong version of cmake_format is detected. " - f"{_archery_install_msg}" - ) - - def check(self): - return self.run("-l", "error", "--check", *self.paths, check=False) - - def fix(self): - return self.run("--in-place", *self.paths, check=False) - - -def cmake_linter(src, fix=False): - """ - Run cmake-format on all CMakeFiles.txt - """ - logger.info("Running cmake-format linters") - - cmake_format = CMakeFormat.from_patterns( - src.path, - include_patterns=[ - 'ci/**/*.cmake', - 'cpp/CMakeLists.txt', - 'cpp/src/**/*.cmake', - 'cpp/src/**/*.cmake.in', - 'cpp/src/**/CMakeLists.txt', - 'cpp/examples/**/CMakeLists.txt', - 'cpp/cmake_modules/*.cmake', - 'go/**/CMakeLists.txt', - 'java/**/CMakeLists.txt', - 'matlab/**/CMakeLists.txt', - 'python/**/CMakeLists.txt', - ], - exclude_patterns=[ - 'cpp/cmake_modules/FindNumPy.cmake', - 'cpp/cmake_modules/FindPythonLibsNew.cmake', - 'cpp/cmake_modules/UseCython.cmake', - 'cpp/src/arrow/util/*.h.cmake', - ] - ) - method = cmake_format.fix if fix else cmake_format.check - - yield LintResult.from_cmd(method()) - - -def python_linter(src, fix=False): - """Run Python linters on python/pyarrow, python/examples, setup.py - and dev/. """ - setup_py = os.path.join(src.python, "setup.py") - setup_cfg = os.path.join(src.python, "setup.cfg") - - logger.info("Running Python formatter (autopep8)") - - autopep8 = Autopep8() - if not autopep8.available: - logger.error( - "Python formatter requested but autopep8 binary not found. 
" - f"{_archery_install_msg}") - return - - # Gather files for autopep8 - patterns = ["python/benchmarks/**/*.py", - "python/examples/**/*.py", - "python/pyarrow/**/*.py", - "python/pyarrow/**/*.pyx", - "python/pyarrow/**/*.pxd", - "python/pyarrow/**/*.pxi", - "dev/*.py", - "dev/archery/**/*.py", - "dev/release/**/*.py"] - files = [setup_py] - for pattern in patterns: - files += list(map(str, Path(src.path).glob(pattern))) - - args = ['--global-config', setup_cfg, '--ignore-local-config'] - if fix: - args += ['-j0', '--in-place'] - args += sorted(files) - yield LintResult.from_cmd(autopep8(*args)) - else: - # XXX `-j0` doesn't work well with `--exit-code`, so instead - # we capture the diff and check whether it's empty - # (https://github.com/hhatto/autopep8/issues/543) - args += ['-j0', '--diff'] - args += sorted(files) - diff = autopep8.run_captured(*args) - if diff: - print(diff.decode('utf8')) - yield LintResult(success=False) - else: - yield LintResult(success=True) - - # Run flake8 after autopep8 (the latter may have modified some files) - logger.info("Running Python linter (flake8)") - - flake8 = Flake8() - if not flake8.available: - logger.error( - "Python linter requested but flake8 binary not found. " - f"{_archery_install_msg}") - return - - flake8_exclude = ['.venv*', 'vendored'] - - yield LintResult.from_cmd( - flake8("--extend-exclude=" + ','.join(flake8_exclude), - "--config=" + os.path.join(src.python, "setup.cfg"), - setup_py, src.pyarrow, os.path.join(src.python, "benchmarks"), - os.path.join(src.python, "examples"), src.dev, check=False)) - - logger.info("Running Cython linter (cython-lint)") - - cython_lint = CythonLint() - if not cython_lint.available: - logger.error( - "Cython linter requested but cython-lint binary not found. " - f"{_archery_install_msg}") - return - - # Gather files for cython-lint - patterns = ["python/pyarrow/**/*.pyx", - "python/pyarrow/**/*.pxd", - "python/pyarrow/**/*.pxi", - "python/examples/**/*.pyx", - "python/examples/**/*.pxd", - "python/examples/**/*.pxi", - ] - files = [] - for pattern in patterns: - files += list(map(str, Path(src.path).glob(pattern))) - args = ['--no-pycodestyle'] - args += sorted(files) - yield LintResult.from_cmd(cython_lint(*args)) - - -def python_cpp_linter(src, clang_format=True, fix=False): - """Run C++ linters on python/pyarrow/src/arrow/python.""" - cpp_src = os.path.join(src.python, "pyarrow", "src", "arrow", "python") - - python = PythonCommand() - - if clang_format: - logger.info("Running clang-format for python/pyarrow/src/arrow/python") - - if "CLANG_TOOLS_PATH" in os.environ: - clang_format_binary = os.path.join( - os.environ["CLANG_TOOLS_PATH"], "clang-format") - else: - clang_format_binary = "clang-format-14" - - run_clang_format = os.path.join(src.cpp, "build-support", - "run_clang_format.py") - args = [run_clang_format, "--source_dir", cpp_src, - "--clang_format_binary", clang_format_binary] - if fix: - args += ["--fix"] - - yield LintResult.from_cmd(python.run(*args)) - - def python_numpydoc(symbols=None, allow_rules=None, disallow_rules=None): """Run numpydoc linter on python. @@ -369,165 +122,3 @@ def python_numpydoc(symbols=None, allow_rules=None, disallow_rules=None): click.echo(click.style(msg, fg='red')) yield LintResult(success=False) - - -def rat_linter(src, root): - """Run apache-rat license linter.""" - logger.info("Running apache-rat linter") - - if src.git_dirty: - logger.warn("Due to the usage of git-archive, uncommitted files will" - " not be checked for rat violations. 
") - - exclusion = exclusion_from_globs( - os.path.join(src.dev, "release", "rat_exclude_files.txt")) - - # Creates a git-archive of ArrowSources, apache-rat expects a gzip - # compressed tar archive. - archive_path = os.path.join(root, "apache-arrow.tar.gz") - src.archive(archive_path, compressor=gzip.compress) - report = Rat().report(archive_path) - - violations = list(report.validate(exclusion=exclusion)) - for violation in violations: - print(f"apache-rat license violation: {violation}") - - yield LintResult(len(violations) == 0) - - -def r_linter(src): - """Run R linter.""" - logger.info("Running R linter") - r_lint_sh = os.path.join(src.r, "lint.sh") - yield LintResult.from_cmd(Bash().run(r_lint_sh, check=False)) - - -class Hadolint(Command): - def __init__(self, hadolint_bin=None): - self.bin = default_bin(hadolint_bin, "hadolint") - - -def is_docker_image(path): - dirname = os.path.dirname(path) - filename = os.path.basename(path) - - excluded = dirname.startswith( - "dev") or dirname.startswith("python/manylinux") - - return filename.startswith("Dockerfile") and not excluded - - -def docker_linter(src): - """Run Hadolint docker linter.""" - logger.info("Running Docker linter") - - hadolint = Hadolint() - - if not hadolint.available: - logger.error( - "hadolint linter requested but hadolint binary not found.") - return - - for path in git.ls_files(git_dir=src.path): - if is_docker_image(path): - yield LintResult.from_cmd(hadolint.run(path, check=False, - cwd=src.path)) - - -class SphinxLint(Command): - def __init__(self, src, path=None, sphinx_lint_bin=None, disable=None, enable=None): - self.src = src - self.path = path - self.bin = default_bin(sphinx_lint_bin, "sphinx-lint") - self.disable = disable or "all" - self.enable = enable - - def lint(self, *args, check=False): - docs_path = os.path.join(self.src.path, "docs") - - args = [] - - if self.disable: - args.extend(["--disable", self.disable]) - - if self.enable: - args.extend(["--enable", self.enable]) - - if self.path is not None: - args.extend([self.path]) - else: - args.extend([docs_path]) - - return self.run(*args, check=check) - - -def docs_linter(src, path=None): - """Run sphinx-lint on docs.""" - logger.info("Running docs linter (sphinx-lint)") - - sphinx_lint = SphinxLint( - src, - path=path, - disable="all", - enable="trailing-whitespace,missing-final-newline" - ) - - if not sphinx_lint.available: - logger.error("sphinx-lint linter requested but sphinx-lint binary not found") - return - - yield LintResult.from_cmd(sphinx_lint.lint()) - - -def linter(src, fix=False, path=None, *, clang_format=False, cpplint=False, - clang_tidy=False, iwyu=False, iwyu_all=False, - python=False, numpydoc=False, cmake_format=False, rat=False, - r=False, docker=False, docs=False): - """Run all linters.""" - with tmpdir(prefix="arrow-lint-") as root: - build_dir = os.path.join(root, "cpp-build") - - # Linters yield LintResult without raising exceptions on failure. - # This allows running all linters in one pass and exposing all - # errors to the user. 
- results = [] - - if clang_format or cpplint or clang_tidy or iwyu: - results.extend(cpp_linter(src, build_dir, - clang_format=clang_format, - cpplint=cpplint, - clang_tidy=clang_tidy, - iwyu=iwyu, - iwyu_all=iwyu_all, - fix=fix)) - - if python: - results.extend(python_linter(src, fix=fix)) - - if python and clang_format: - results.extend(python_cpp_linter(src, - clang_format=clang_format, - fix=fix)) - - if numpydoc: - results.extend(python_numpydoc()) - - if cmake_format: - results.extend(cmake_linter(src, fix=fix)) - - if rat: - results.extend(rat_linter(src, root)) - - if r: - results.extend(r_linter(src)) - - if docker: - results.extend(docker_linter(src)) - - if docs: - results.extend(docs_linter(src, path)) - - # Raise error if one linter failed, ensuring calling code can exit with - # non-zero. - for result in results: - result.ok() diff --git a/dev/archery/archery/utils/rat.py b/dev/archery/archery/utils/rat.py deleted file mode 100644 index 12b127f10b5..00000000000 --- a/dev/archery/archery/utils/rat.py +++ /dev/null @@ -1,70 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -import fnmatch -import re -from xml.etree import ElementTree - -from ..lang.java import Jar -from .cache import Cache -from .command import capture_stdout - -RAT_VERSION = 0.13 -RAT_JAR_FILENAME = f"apache-rat-{RAT_VERSION}.jar" -RAT_URL_ = "https://repo1.maven.org/maven2/org/apache/rat/apache-rat" -RAT_URL = "/".join([RAT_URL_, str(RAT_VERSION), RAT_JAR_FILENAME]) - - -class Rat(Jar): - def __init__(self): - jar = Cache().get_or_insert_from_url(RAT_JAR_FILENAME, RAT_URL) - Jar.__init__(self, jar) - - @capture_stdout(strip=False) - def run_report(self, archive_path, **kwargs): - return self.run("--xml", archive_path, **kwargs) - - def report(self, archive_path, **kwargs): - return RatReport(self.run_report(archive_path, **kwargs)) - - -def exclusion_from_globs(exclusions_path): - with open(exclusions_path, 'r') as exclusions_fd: - exclusions = [e.strip() for e in exclusions_fd] - return lambda path: any([fnmatch.fnmatch(path, e) for e in exclusions]) - - -class RatReport: - def __init__(self, xml): - self.xml = xml - self.tree = ElementTree.fromstring(xml) - - def __repr__(self): - return f"RatReport({self.xml})" - - def validate(self, exclusion=None): - for r in self.tree.findall('resource'): - approvals = r.findall('license-approval') - if not approvals or approvals[0].attrib['name'] == 'true': - continue - - clean_name = re.sub('^[^/]+/', '', r.attrib['name']) - - if exclusion and exclusion(clean_name): - continue - - yield clean_name diff --git a/dev/archery/setup.py b/dev/archery/setup.py index 6587e61546b..33b9591dee2 100755 --- a/dev/archery/setup.py +++ b/dev/archery/setup.py @@ -36,8 +36,6 @@ 'docker': ['ruamel.yaml', 'python-dotenv'], 'integration': ['cffi', 'numpy'], 'integration-java': ['jpype1'], - 'lint': ['numpydoc==1.1.0', 'autopep8', 'flake8==6.1.0', 'cython-lint', - 'cmake_format==0.6.13', 'sphinx-lint==0.9.1'], 'numpydoc': ['numpydoc==1.1.0'], 'release': ['pygithub', jinja_req, 'semver', 'gitpython'], } diff --git a/docker-compose.yml b/docker-compose.yml index 96013d2b680..2eb8424aee5 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -142,7 +142,6 @@ x-hierarchy: - ubuntu-cpp-static - ubuntu-c-glib: - ubuntu-ruby - - ubuntu-lint - ubuntu-python - ubuntu-python-sdist-test - ubuntu-python-313-freethreading @@ -1870,34 +1869,6 @@ services: /arrow/ci/scripts/c_glib_build.sh /arrow /build && /arrow/ci/scripts/r_build.sh /arrow /build" - ################################# Tools ##################################### - - ubuntu-lint: - # Usage: - # docker compose build ubuntu-cpp - # docker compose build ubuntu-lint - # docker compose run ubuntu-lint - image: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint - build: - context: . 
- dockerfile: ci/docker/linux-apt-lint.dockerfile - cache_from: - - ${REPO}:${ARCH}-ubuntu-${UBUNTU}-lint - args: - base: ${REPO}:${ARCH}-ubuntu-${UBUNTU}-cpp - clang_tools: ${CLANG_TOOLS} - environment: - <<: [*common, *ccache] - volumes: *ubuntu-volumes - command: > - /bin/bash -c " - git config --global --add safe.directory /arrow && - python3 -m venv /build/pyvenv && - source /build/pyvenv/bin/activate && - pip install -U pip setuptools && - pip install arrow/dev/archery[lint] && - archery lint --all --no-clang-tidy --no-iwyu --no-numpydoc --src /arrow" - ######################### Integration Tests ################################# postgres: diff --git a/docs/source/developers/cpp/development.rst b/docs/source/developers/cpp/development.rst index 1f899a5ce78..1137438a2ab 100644 --- a/docs/source/developers/cpp/development.rst +++ b/docs/source/developers/cpp/development.rst @@ -102,128 +102,13 @@ following checks: compiler warnings with ``-DBUILD_WARNING_LEVEL=CHECKIN``. Note that there are classes of warnings (such as ``-Wdocumentation``, see more on this below) that are not caught by ``gcc``. -* Passes various C++ (and others) style checks, checked with the ``lint`` - subcommand to :ref:`Archery `. This can also be fixed locally - by running ``archery lint --cpplint --clang-format --clang-tidy --fix``. -* CMake files pass style checks, can be fixed by running - ``archery lint --cmake-format --fix``. This requires Python - 3 and `cmake_format `_ (note: - this currently does not work on Windows). +* Passes various C++ (and others) style checks by running + ``pre-commit run --show-diff-on-failure --color=always --all-files + cpp``. On pull requests, the "Dev / Lint" pipeline will run these checks, and report what files/lines need to be fixed, if any. -In order to account for variations in the behavior of ``clang-format`` between -major versions of LLVM, we pin the version of ``clang-format`` used. You can -confirm the current pinned version by finding the ``CLANG_TOOLS`` variable -value in `.env `_. Note that -the version must match exactly; a newer version (even a patch release) will -not work. LLVM can be installed through a system package manager or a package -manager like Conda or Homebrew, though note they may not offer the exact -version needed. Alternatively, binaries can be directly downloaded from the -`LLVM website `_. - -For convenience, C++ style checks can run via a build, in addition to -Archery. To do so, build one or more of the targets ``format`` (for -clang-format), ``lint_cpp_cli``, ``lint`` (for cpplint), or -``clang-tidy``. For example:: - - $ cmake -GNinja ../cpp ... - $ ninja format lint clang-tidy lint_cpp_cli - -Depending on how you installed clang-format, the build system may not be able -to find it. In that case, invoking CMake will show errors like the following:: - - -- clang-format 12 not found - -Or if the wrong version is installed:: - - -- clang-format found, but version did not match "^clang-format version 12" - -You can provide an explicit path to the directory containing the clang-format -executable and others with the environment variable ``$CLANG_TOOLS_PATH``, or -by passing ``-DClangTools_PATH=$PATH_TO_CLANG_TOOLS`` when invoking CMake. For -example:: - - # We unpacked LLVM here: - $ ~/tools/bin/clang-format --version - clang-format version 12.0.0 - # Pass the directory containing the tools to CMake - $ cmake ../cpp -DClangTools_PATH=~/tools/bin/ - ...snip... 
- -- clang-tidy found at /home/user/tools/bin/clang-tidy - -- clang-format found at /home/user/tools/bin/clang-format - ...snip... - -To make linting more reproducible for everyone, we provide a ``docker compose`` -target that is executable from the root of the repository: - -.. code-block:: - - $ docker compose run ubuntu-lint - -Cleaning includes with include-what-you-use (IWYU) -~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ - -We occasionally use Google's `include-what-you-use -`_ tool, also -known as IWYU, to remove unnecessary imports. - -To begin using IWYU, you must first build it by following the instructions in -the project's documentation. Once the ``include-what-you-use`` executable is in -your ``$PATH``, you must run CMake with ``-DCMAKE_EXPORT_COMPILE_COMMANDS=ON`` -in a new out-of-source CMake build directory like so: - -.. code-block:: shell - - mkdir -p $ARROW_ROOT/cpp/iwyu - cd $ARROW_ROOT/cpp/iwyu - cmake -DCMAKE_EXPORT_COMPILE_COMMANDS=ON \ - -DARROW_BUILD_BENCHMARKS=ON \ - -DARROW_BUILD_BENCHMARKS_REFERENCE=ON \ - -DARROW_BUILD_TESTS=ON \ - -DARROW_BUILD_UTILITIES=ON \ - -DARROW_COMPUTE=ON \ - -DARROW_CSV=ON \ - -DARROW_DATASET=ON \ - -DARROW_FILESYSTEM=ON \ - -DARROW_FLIGHT=ON \ - -DARROW_GANDIVA=ON \ - -DARROW_HDFS=ON \ - -DARROW_JSON=ON \ - -DARROW_PARQUET=ON \ - -DARROW_S3=ON \ - -DARROW_WITH_BROTLI=ON \ - -DARROW_WITH_BZ2=ON \ - -DARROW_WITH_LZ4=ON \ - -DARROW_WITH_SNAPPY=ON \ - -DARROW_WITH_ZLIB=ON \ - -DARROW_WITH_ZSTD=ON \ - .. - -In order for IWYU to run on the desired component in the codebase, it must be -enabled by the CMake configuration flags. Once this is done, you can run IWYU -on the whole codebase by running a helper ``iwyu.sh`` script: - -.. code-block:: shell - - IWYU_SH=$ARROW_ROOT/cpp/build-support/iwyu/iwyu.sh - ./$IWYU_SH - -Since this is very time consuming, you can check a subset of files matching -some string pattern with the special "match" option - -.. code-block:: shell - - ./$IWYU_SH match $PATTERN - -For example, if you wanted to do IWYU checks on all files in -``src/arrow/array``, you could run - -.. code-block:: shell - - ./$IWYU_SH match arrow/array - Checking for ABI and API stability ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ diff --git a/docs/source/developers/guide/step_by_step/styling.rst b/docs/source/developers/guide/step_by_step/styling.rst index c155acb3895..03d66e3c7b9 100644 --- a/docs/source/developers/guide/step_by_step/styling.rst +++ b/docs/source/developers/guide/step_by_step/styling.rst @@ -31,14 +31,7 @@ linters and styling of the code. .. tab-item:: PyArrow - We use flake8 linting for styling issues in Python. To help - developers check styling of the code, among other common - development tasks, :ref:`Archery utility` tool was - developed within Apache Arrow. - - The instructions on how to set up and use Archery - can be found in the Coding Style section of the - :ref:`python-development`. + :ref:`python-coding-style`. .. tab-item:: R package diff --git a/docs/source/developers/guide/tutorials/python_tutorial.rst b/docs/source/developers/guide/tutorials/python_tutorial.rst index c12c4489aee..3838acb9fcf 100644 --- a/docs/source/developers/guide/tutorials/python_tutorial.rst +++ b/docs/source/developers/guide/tutorials/python_tutorial.rst @@ -335,26 +335,12 @@ Check styling ------------- At the end we also need to check the styling. In Arrow we use a -utility called `Archery `_ -to check if code is in line with PEP 8 style guide. +utility called `pre-commit `_ to check if +code is in line with PEP 8 style guide. .. 
code:: console - $ archery lint --python --fix - INFO:archery:Running Python formatter (autopep8) - INFO:archery:Running Python linter (flake8) - /Users/alenkafrim/repos/arrow/python/pyarrow/tests/test_compute.py:2288:80: E501 line too long (88 > 79 characters) - -With the ``--fix`` command Archery will attempt to fix style issues, -but some issues like line length can't be fixed automatically. -We should make the necessary corrections ourselves and run -Archery again. - -.. code:: console - - $ archery lint --python --fix - INFO:archery:Running Python formatter (autopep8) - INFO:archery:Running Python linter (flake8) + $ pre-commit run --show-diff-on-failure --color=always --all-files python Done. Now lets make the Pull Request! diff --git a/docs/source/developers/python.rst b/docs/source/developers/python.rst index dd43f853a0b..6a254fdd5be 100644 --- a/docs/source/developers/python.rst +++ b/docs/source/developers/python.rst @@ -26,33 +26,18 @@ Python Development This page provides general Python development guidelines and source build instructions for all platforms. +.. _python-coding-style: + Coding Style ============ We follow a similar PEP8-like coding style to the `pandas project -`_. To check style issues, use the -:ref:`Archery ` subcommand ``lint``: - -.. code-block:: - - $ pip install -e "arrow/dev/archery[lint]" - -.. code-block:: - - $ archery lint --python - -Some of the issues can be automatically fixed by passing the ``--fix`` option: - -.. code-block:: - - $ archery lint --python --fix - -The Python code base also includes some C++ files. To fix formatting in those -files, add the ``--clang-format`` option: +`_. To fix style issues, use the +``pre-commit`` command: .. code-block:: - $ archery lint --python --clang-format --fix + $ pre-commit run --show-diff-on-failure --color=always --all-files python .. _python-unit-testing: diff --git a/r/lint.sh b/r/lint.sh deleted file mode 100755 index 21e73747333..00000000000 --- a/r/lint.sh +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# This script requires Python 3 and clang-format, which should already be -# on your system. See r/README.md for further guidance - -set -e - -SOURCE_DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -CPP_BUILD_SUPPORT=$SOURCE_DIR/../cpp/build-support - -# Run clang-format -if [ -z "${CLANG_FORMAT:-}" ]; then - CLANG_TOOLS=$(. 
"${SOURCE_DIR}/../.env" && echo ${CLANG_TOOLS}) - if type clang-format-${CLANG_TOOLS} >/dev/null 2>&1; then - CLANG_FORMAT=clang-format-${CLANG_TOOLS} - elif type brew >/dev/null 2>&1; then - CLANG_FORMAT=$(brew --prefix llvm@${CLANG_TOOLS})/bin/clang-format - else - CLANG_FORMAT=clang-format - fi -fi -$CPP_BUILD_SUPPORT/run_clang_format.py \ - --clang_format_binary=$CLANG_FORMAT \ - --exclude_glob=$CPP_BUILD_SUPPORT/lint_exclusions.txt \ - --source_dir=$SOURCE_DIR/src --quiet $1 - - -# Run cpplint -CPPLINT=$CPP_BUILD_SUPPORT/cpplint.py -$CPP_BUILD_SUPPORT/run_cpplint.py \ - --cpplint_binary=$CPPLINT \ - --exclude_glob=$CPP_BUILD_SUPPORT/lint_exclusions.txt \ - --source_dir=$SOURCE_DIR/src --quiet - -# Run lintr -R -e "if(!requireNamespace('lintr', quietly=TRUE)){stop('lintr is not installed, please install it with R -e \"install.packages(\'lintr\')\"')}" -NOT_CRAN=true R -e "lintr::lint_package('${SOURCE_DIR}')" diff --git a/r/tools/lint.R b/r/tools/lint.R deleted file mode 100755 index f64cc2c6e54..00000000000 --- a/r/tools/lint.R +++ /dev/null @@ -1,26 +0,0 @@ -#!/usr/bin/env Rscript -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -lints <- lintr::lint_package("r") -if (length(lints) == 0) { - q("no") -} - -print(lints) -q("no", status = 1) diff --git a/r/vignettes/developers/workflow.Rmd b/r/vignettes/developers/workflow.Rmd index 42c8030117f..be109ed5a96 100644 --- a/r/vignettes/developers/workflow.Rmd +++ b/r/vignettes/developers/workflow.Rmd @@ -51,33 +51,30 @@ pkgdown::build_site(preview=TRUE) ## Styling and linting -### R code +Styling and linting can be set up and performed entirely with the [pre-commit](https://pre-commit.com/) tool: -The R code in the package follows [the tidyverse style](https://style.tidyverse.org/). On PR submission (and on pushes) our CI will run linting and will flag possible errors on the pull request with annotations. +```bash +pre-commit run --show-diff-on-failure --color=always --all-files r +``` -To run the linter locally, install the `{lintr}` package (note, we currently use a fork that includes fixes not yet accepted upstream, see how lintr is being installed in the file [`ci/docker/linux-apt-lint.dockerfile`](https://github.com/apache/arrow/blob/main/ci/docker/linux-apt-lint.dockerfile) for the current status) and then run +See also the following subsections our styling and lint details for R and C++ codes. -```r -lintr::lint_package("arrow/r") -``` +### R code -You can automatically change the formatting of the code in the package using the [styler](https://styler.r-lib.org/) package. +The R code in the package follows [the tidyverse style](https://style.tidyverse.org/). On PR submission (and on pushes) our CI will run linting and will flag possible errors on the pull request with annotations. 
-Run the styler locally either via Makefile commands:
+You can automatically change the formatting of the code in the package using the [styler](https://styler.r-lib.org/) package.

-```bash
-make style # (for only the files changed)
-make style-all # (for all files)
-```
+The styler package will fix many styling errors, though not all lintr errors are automatically fixable with styler. The list of files we intentionally do not style is in `r/.styler_excludes.R`.

-or in R:
+Linting and styling with [pre-commit](https://pre-commit.com/) as described above is the best way to ensure your changes are being checked properly, but you can also run the tools individually if you prefer:

 ```r
-# note the file that should not be styled
-styler::style_pkg(exclude_files = c("data-raw/codegen.R"))
+lintr::lint_package() # for linting
+styler::style_pkg() # for styling
 ```

-The styler package will fix many styling errors, thought not all lintr errors are automatically fixable with styler. The list of files we intentionally do not style is in `r/.styler_excludes.R`.
+Note: To run lintr, we require the `cyclocomp` package to be installed first.

 ### C++ code

@@ -94,39 +91,6 @@ use an editors/IDE that formats your code for you. Many popular editors/IDEs
 have support for running `clang-format` on C++ files when you save them.
 Installing/enabling the appropriate plugin may save you much frustration.

-Check for style errors with
-
-```bash
-./lint.sh
-```
-
-Fix any style issues before committing with
-
-```bash
-./lint.sh --fix
-```
-
-The lint script requires Python 3 and `clang-format`. If the command
-isn't found, you can explicitly provide the path to it like:
-
-```bash
-CLANG_FORMAT=/opt/llvm/bin/clang-format ./lint.sh
-```
-
-You can see what version of `clang-format` is required by the following
-command:
-
-```bash
-(. ../.env && echo ${CLANG_TOOLS})
-```
-
-_Note_ that the lint script requires Python 3 and the Python dependencies
-(note that `cmake_format is pinned to a specific version):
-
-* autopep8
-* flake8
-* cmake_format==0.5.2
-
 ## Running tests

 Tests can be run either using `devtools::test()` or the Makefile alternative.
@@ -209,4 +173,3 @@ This runs each of the R-related CI tasks.
 See the `r:` group definition near the beginning of the [crossbow configuration](https://github.com/apache/arrow/blob/main/dev/tasks/tasks.yml)
 for a list of glob expression patterns that match names of items in the `tasks:`
 list below it.
-

From d82c44e7f62cffb5479f4f4c07ac16d4b319133b Mon Sep 17 00:00:00 2001
From: Sutou Kouhei
Date: Fri, 13 Jun 2025 16:13:47 +0900
Subject: [PATCH 48/63] GH-46798: [CI][Dev] Add support for pre-commit 2.17.0 (#46799)

### Rationale for this change

Ubuntu 22.04 ships pre-commit 2.17.0, so we should support pre-commit 2.17.0 to make development easier.

### What changes are included in this PR?

* Use a slightly older shfmt pre-commit configuration that still works with pre-commit 2.17.0
* Use Ubuntu 22.04 on CI to ensure the checks keep working on Ubuntu 22.04

### Are these changes tested?

Yes.

### Are there any user-facing changes?

No.
* GitHub Issue: #46798

Authored-by: Sutou Kouhei
Signed-off-by: Sutou Kouhei
---
 .github/workflows/dev.yml | 15 +++++++--------
 .pre-commit-config.yaml   |  5 ++++-
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml
index a8e7b396760..dda648c8c47 100644
--- a/.github/workflows/dev.yml
+++ b/.github/workflows/dev.yml
@@ -41,7 +41,8 @@ jobs:

   lint:
     name: Lint C++, Python, R, Docker, RAT
-    runs-on: ubuntu-24.04
+    # Use Ubuntu 22.04 to ensure that pre-commit works on Ubuntu 22.04.
+    runs-on: ubuntu-22.04
     if: ${{ !contains(github.event.pull_request.title, 'WIP') }}
     timeout-minutes: 15
     steps:
@@ -49,15 +50,13 @@ jobs:
         uses: actions/checkout@3df4ab11eba7bda6032a0b82a6bb43b11571feac # v4.0.0
         with:
           fetch-depth: 0
-      - name: Setup Python
-        uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0
-        with:
-          python-version: 3.12
-      - uses: r-lib/actions/setup-r@bd49c52ffe281809afa6f0fecbf37483c5dd0b93 # v2.11.3
       - name: Install pre-commit
         run: |
-          python -m pip install pre-commit
-          pre-commit run --show-diff-on-failure --color=always
+          sudo apt update
+          sudo apt install -y -V \
+            pre-commit \
+            r-cran-xml2 \
+            ruby-dev
       - name: Cache pre-commit
        uses: actions/cache@v4
        with:
diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml
index 9598d6d44f6..ffe7a23f737 100644
--- a/.pre-commit-config.yaml
+++ b/.pre-commit-config.yaml
@@ -358,7 +358,10 @@ repos:
           ?^swift/gen-protobuffers\.sh$|
           )
   - repo: https://github.com/scop/pre-commit-shfmt
-    rev: v3.11.0-1
+    # v3.11.0-1 or later requires pre-commit 3.2.0 or later, but Ubuntu
+    # 22.04 ships pre-commit 2.17.0. We can update this rev after
+    # Ubuntu 22.04 reaches EOL (June 2027).
+    rev: v3.10.0-1
     hooks:
       - id: shfmt
         alias: shell

From e717fc5616fff04da0adc9a51008082c29322280 Mon Sep 17 00:00:00 2001
From: Raúl Cumplido
Date: Fri, 13 Jun 2025 10:03:46 +0200
Subject: [PATCH 49/63] GH-25025: [C++] Move non-core compute kernels into separate shared library (#46261)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Rationale for this change

Arrow is quite a heavy dependency and some users don't need all the tools we provide bundled with libarrow. Moving Arrow Compute to its own shared library lets users choose an installation that better suits their needs, with a smaller memory footprint if necessary.
It might also help some users add new kernels to an existing Arrow installation without recompiling it.

### What changes are included in this PR?

- Move all the Arrow Compute kernel functions to a new `ArrowCompute` shared library (`libarrow_compute.so`).
- Create a new API to initialize Arrow Compute, registering the existing kernels into the `FunctionRegistry`.
- Update the Python/R/C GLib bindings to register the compute kernels automatically, transparently to the user.
- Update the Linux packaging to provide the new arrow-compute library.
- Update the documentation with the new requirement to call `arrow::compute::Initialize()`.

### Are these changes tested?

Yes, on all CI jobs.

### Are there any user-facing changes?

Yes. The Arrow compute functions will be provided as a different library.
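For illustration, here is an editor's sketch (not part of this patch) of what a standalone C++ program using a compute kernel looks like after this change; the choice of the `sum` function and the builder-based setup are example choices only:

```cpp
#include <iostream>

#include <arrow/api.h>
#include <arrow/compute/api.h>  // now backed by libarrow_compute

arrow::Status RunExample() {
  // Register the non-core kernels; without this call, looking up a
  // function such as "sum" in the default registry fails.
  ARROW_RETURN_NOT_OK(arrow::compute::Initialize());

  // Build a small Int64 array to aggregate over.
  arrow::Int64Builder builder;
  ARROW_RETURN_NOT_OK(builder.AppendValues({1, 2, 3}));
  ARROW_ASSIGN_OR_RAISE(std::shared_ptr<arrow::Array> array, builder.Finish());

  // Look up and invoke the "sum" aggregate kernel by name.
  ARROW_ASSIGN_OR_RAISE(arrow::Datum sum,
                        arrow::compute::CallFunction("sum", {array}));
  std::cout << "sum = " << sum.scalar()->ToString() << std::endl;
  return arrow::Status::OK();
}

int main() {
  arrow::Status st = RunExample();
  if (!st.ok()) {
    std::cerr << st.ToString() << std::endl;
    return 1;
  }
  return 0;
}
```

Such a program now links against both `libarrow` and `libarrow_compute` (for example via the `ArrowCompute::arrow_compute_shared` CMake target used in the tutorial changes below).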
Any user using Arrow Compute from C++ directly will require a call to `arrow::compute::Initialize()` in order for the functions and kernels to be registered **This PR includes breaking changes to public APIs.** * GitHub Issue: #25025 Lead-authored-by: Raúl Cumplido Co-authored-by: Bryce Mecum Co-authored-by: Sutou Kouhei Co-authored-by: Rossi Sun Co-authored-by: Antoine Pitrou Signed-off-by: Raúl Cumplido --- c_glib/arrow-glib/compute.cpp | 23 ++++ c_glib/arrow-glib/compute.h | 4 + c_glib/arrow-glib/meson.build | 2 +- c_glib/meson.build | 13 +- c_glib/test/run-test.rb | 1 + cpp/CMakeLists.txt | 7 + cpp/examples/arrow/CMakeLists.txt | 8 +- .../arrow/compute_and_write_csv_example.cc | 1 + cpp/examples/arrow/join_example.cc | 1 + cpp/examples/tutorial_examples/CMakeLists.txt | 2 +- .../tutorial_examples/compute_example.cc | 3 + .../tutorial_examples/dataset_example.cc | 3 + cpp/src/arrow/ArrowComputeConfig.cmake.in | 38 ++++++ cpp/src/arrow/CMakeLists.txt | 122 +++++++++++++----- cpp/src/arrow/acero/ArrowAceroConfig.cmake.in | 6 +- cpp/src/arrow/acero/CMakeLists.txt | 13 +- cpp/src/arrow/acero/arrow-acero.pc.in | 2 +- cpp/src/arrow/arrow-compute.pc.in | 28 ++++ cpp/src/arrow/c/CMakeLists.txt | 23 +++- cpp/src/arrow/compute/CMakeLists.txt | 27 +++- cpp/src/arrow/compute/api.h | 1 + cpp/src/arrow/compute/initialize.cc | 80 ++++++++++++ cpp/src/arrow/compute/initialize.h | 32 +++++ cpp/src/arrow/compute/kernels/CMakeLists.txt | 4 +- .../arrow/compute/kernels/aggregate_basic.cc | 1 + .../arrow/compute/kernels/chunked_internal.h | 5 +- .../arrow/compute/kernels/codegen_internal.h | 10 +- .../arrow/compute/kernels/hash_aggregate.cc | 2 + .../compute/kernels/scalar_temporal_unary.cc | 1 + cpp/src/arrow/compute/kernels/vector_hash.cc | 1 + .../arrow/compute/kernels/vector_replace.cc | 1 + cpp/src/arrow/compute/key_hash_internal.h | 5 +- cpp/src/arrow/compute/key_map_internal.h | 3 +- cpp/src/arrow/compute/light_array_internal.h | 27 ++-- cpp/src/arrow/compute/registry.cc | 44 ------- cpp/src/arrow/compute/row/CMakeLists.txt | 7 +- cpp/src/arrow/compute/row/compare_internal.h | 3 +- cpp/src/arrow/compute/row/encode_internal.h | 3 +- cpp/src/arrow/compute/row/grouper.h | 7 +- cpp/src/arrow/compute/row/grouper_internal.h | 2 +- .../arrow/compute/row/row_encoder_internal.h | 13 +- cpp/src/arrow/compute/row/row_internal.h | 5 +- cpp/src/arrow/compute/test_env.cc | 42 ++++++ cpp/src/arrow/compute/util.h | 66 +++++----- cpp/src/arrow/compute/util_internal.h | 3 +- cpp/src/arrow/compute/visibility.h | 49 +++++++ cpp/src/arrow/dataset/CMakeLists.txt | 4 +- cpp/src/arrow/engine/CMakeLists.txt | 1 + cpp/src/arrow/flight/sql/CMakeLists.txt | 4 + cpp/src/arrow/type.h | 1 + .../apache-arrow/debian/control.in | 27 +++- .../debian/libarrow-compute-dev.install | 4 + .../debian/libarrow-compute2100.install | 1 + .../apache-arrow/debian/libarrow-dev.install | 1 - .../apache-arrow/yum/arrow.spec.in | 39 +++++- dev/tasks/tasks.yml | 8 ++ docs/source/cpp/compute.rst | 14 +- python/CMakeLists.txt | 18 ++- python/pyarrow/_compute.pyx | 4 + python/pyarrow/includes/libarrow.pxd | 2 + r/R/arrowExports.R | 4 + r/configure | 7 + r/configure.win | 19 ++- r/data-raw/codegen.R | 1 + r/src/arrowExports.cpp | 10 ++ r/src/compute.cpp | 6 + ruby/red-arrow/lib/arrow/loader.rb | 1 + 67 files changed, 748 insertions(+), 172 deletions(-) create mode 100644 cpp/src/arrow/ArrowComputeConfig.cmake.in create mode 100644 cpp/src/arrow/arrow-compute.pc.in create mode 100644 cpp/src/arrow/compute/initialize.cc create mode 100644 
cpp/src/arrow/compute/initialize.h create mode 100644 cpp/src/arrow/compute/test_env.cc create mode 100644 cpp/src/arrow/compute/visibility.h create mode 100644 dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute-dev.install create mode 100644 dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute2100.install diff --git a/c_glib/arrow-glib/compute.cpp b/c_glib/arrow-glib/compute.cpp index 9b9faeb4495..5f494f3bc7b 100644 --- a/c_glib/arrow-glib/compute.cpp +++ b/c_glib/arrow-glib/compute.cpp @@ -36,6 +36,7 @@ #include #include +#include template typename ArrowType::c_type @@ -160,6 +161,9 @@ G_BEGIN_DECLS * @title: Computation on data * @include: arrow-glib/arrow-glib.h * + * You must call garrow_compute_initialize() explicitly before you use + * computation related features. + * * #GArrowExecuteContext is a class to customize how to execute a * function. * @@ -250,6 +254,25 @@ G_BEGIN_DECLS * There are many functions to compute data on an array. */ +/** + * garrow_compute_initialize: + * @error: (nullable): Return location for a #GError or %NULL. + * + * You must call this explicitly before you use computation related + * features. + * + * Returns: %TRUE if initializing the compute module completed successfully, + * %FALSE otherwise. + * + * Since: 21.0.0 + */ +gboolean +garrow_compute_initialize(GError **error) +{ + auto status = arrow::compute::Initialize(); + return garrow::check(error, status, "[compute][initialize]"); +} + typedef struct GArrowExecuteContextPrivate_ { arrow::compute::ExecContext context; diff --git a/c_glib/arrow-glib/compute.h b/c_glib/arrow-glib/compute.h index 54b0ddb014f..0f689d147e3 100644 --- a/c_glib/arrow-glib/compute.h +++ b/c_glib/arrow-glib/compute.h @@ -25,6 +25,10 @@ G_BEGIN_DECLS +GARROW_AVAILABLE_IN_21_0 +gboolean +garrow_compute_initialize(GError **error); + #define GARROW_TYPE_EXECUTE_CONTEXT (garrow_execute_context_get_type()) GARROW_AVAILABLE_IN_1_0 G_DECLARE_DERIVABLE_TYPE( diff --git a/c_glib/arrow-glib/meson.build b/c_glib/arrow-glib/meson.build index ff52aedf003..b755ffb56ac 100644 --- a/c_glib/arrow-glib/meson.build +++ b/c_glib/arrow-glib/meson.build @@ -223,7 +223,7 @@ gio = cxx.find_library('gio-2.0', dirs: [gobject_libdir], required: false) if not gio.found() gio = dependency('gio-2.0') endif -dependencies = [arrow, arrow_acero, gobject, gio] +dependencies = [arrow_acero, arrow_compute, arrow, gobject, gio] libarrow_glib = library( 'arrow-glib', sources: sources + enums, diff --git a/c_glib/meson.build b/c_glib/meson.build index d783cb399e8..b3f62ab3010 100644 --- a/c_glib/meson.build +++ b/c_glib/meson.build @@ -147,7 +147,13 @@ if arrow_cpp_build_lib_dir == '' modules: ['ArrowCUDA::arrow_cuda_shared'], required: false, ) - # we do not support compiling GLib without Acero engine + # we do not support compiling GLib without Compute and Acero engine + arrow_compute = dependency( + 'arrow-compute', + 'ArrowCompute', + kwargs: common_args, + modules: ['ArrowCompute::arrow_compute_shared'], + ) arrow_acero = dependency( 'arrow-acero', 'ArrowAcero', @@ -215,6 +221,11 @@ main(void) dirs: [arrow_cpp_build_lib_dir], required: false, ) + arrow_compute = cpp_compiler.find_library( + 'arrow_compute', + dirs: [arrow_cpp_build_lib_dir], + required: true, + ) arrow_acero = cpp_compiler.find_library( 'arrow_acero', dirs: [arrow_cpp_build_lib_dir], diff --git a/c_glib/test/run-test.rb b/c_glib/test/run-test.rb index 46d2ebe3f6e..9fdcdcdce0e 100755 --- a/c_glib/test/run-test.rb +++ b/c_glib/test/run-test.rb @@ -31,6 +31,7 @@ Gio = 
GI.load("Gio")
 Arrow = GI.load("Arrow")
+Arrow.compute_initialize

 module Arrow
   class Buffer
     alias_method :initialize_raw, :initialize
diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt
index 3a71e147b54..60a77c815e3 100644
--- a/cpp/CMakeLists.txt
+++ b/cpp/CMakeLists.txt
@@ -438,6 +438,13 @@ if(ARROW_BUILD_STATIC)
   string(APPEND ARROW_ACERO_PC_CFLAGS_PRIVATE " -DARROW_ACERO_STATIC")
 endif()

+# For arrow-compute.pc.
+set(ARROW_COMPUTE_PC_CFLAGS "")
+set(ARROW_COMPUTE_PC_CFLAGS_PRIVATE "")
+if(ARROW_BUILD_STATIC)
+  string(APPEND ARROW_COMPUTE_PC_CFLAGS_PRIVATE " -DARROW_COMPUTE_STATIC")
+endif()
+
 # For arrow-cuda.pc.
 set(ARROW_CUDA_PC_CFLAGS "")
 set(ARROW_CUDA_PC_CFLAGS_PRIVATE "")
diff --git a/cpp/examples/arrow/CMakeLists.txt b/cpp/examples/arrow/CMakeLists.txt
index bf0748f5501..87b677f3b57 100644
--- a/cpp/examples/arrow/CMakeLists.txt
+++ b/cpp/examples/arrow/CMakeLists.txt
@@ -43,7 +43,13 @@ if(ARROW_SUBSTRAIT)
 endif()

 if(ARROW_COMPUTE AND ARROW_CSV)
-  add_arrow_example(compute_and_write_csv_example)
+  if(ARROW_BUILD_SHARED)
+    set(COMPUTE_KERNELS_LINK_LIBS arrow_compute_shared)
+  else()
+    set(COMPUTE_KERNELS_LINK_LIBS arrow_compute_static)
+  endif()
+  add_arrow_example(compute_and_write_csv_example EXTRA_LINK_LIBS
+                    ${COMPUTE_KERNELS_LINK_LIBS})
 endif()

 if(ARROW_FLIGHT)
diff --git a/cpp/examples/arrow/compute_and_write_csv_example.cc b/cpp/examples/arrow/compute_and_write_csv_example.cc
index 7e0f6cdf1ce..234d6abf570 100644
--- a/cpp/examples/arrow/compute_and_write_csv_example.cc
+++ b/cpp/examples/arrow/compute_and_write_csv_example.cc
@@ -41,6 +41,7 @@
 // in the current directory

 arrow::Status RunMain(int argc, char** argv) {
+  ARROW_RETURN_NOT_OK(arrow::compute::Initialize());
   // Make Arrays
   arrow::NumericBuilder<arrow::Int64Type> int64_builder;
   arrow::BooleanBuilder boolean_builder;
diff --git a/cpp/examples/arrow/join_example.cc b/cpp/examples/arrow/join_example.cc
index c1c6e5e82ff..738420d48e1 100644
--- a/cpp/examples/arrow/join_example.cc
+++ b/cpp/examples/arrow/join_example.cc
@@ -82,6 +82,7 @@ arrow::Result<std::shared_ptr<arrow::dataset::InMemoryDataset>> CreateDataSetFromCSVData
 }

 arrow::Status DoHashJoin() {
+  ARROW_RETURN_NOT_OK(arrow::compute::Initialize());
   arrow::dataset::internal::Initialize();
   ARROW_ASSIGN_OR_RAISE(auto l_dataset, CreateDataSetFromCSVData(true));
diff --git a/cpp/examples/tutorial_examples/CMakeLists.txt b/cpp/examples/tutorial_examples/CMakeLists.txt
index a6f8350c41d..1466bce48af 100644
--- a/cpp/examples/tutorial_examples/CMakeLists.txt
+++ b/cpp/examples/tutorial_examples/CMakeLists.txt
@@ -37,7 +37,7 @@ target_link_libraries(file_access_example PRIVATE Arrow::arrow_shared
                       Parquet::parquet_shared)

 add_executable(compute_example compute_example.cc)
-target_link_libraries(compute_example PRIVATE Arrow::arrow_shared)
+target_link_libraries(compute_example PRIVATE ArrowCompute::arrow_compute_shared)

 add_executable(dataset_example dataset_example.cc)
 target_link_libraries(dataset_example PRIVATE ArrowDataset::arrow_dataset_shared)
diff --git a/cpp/examples/tutorial_examples/compute_example.cc b/cpp/examples/tutorial_examples/compute_example.cc
index 3a65214c0ef..767719c52b0 100644
--- a/cpp/examples/tutorial_examples/compute_example.cc
+++ b/cpp/examples/tutorial_examples/compute_example.cc
@@ -49,6 +49,9 @@ arrow::Status RunMain() {

   schema = arrow::schema({field_a, field_b});

+  // Initialize the compute module to register the required compute kernels.
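+  // [Editor's note, not part of the original patch: if Initialize() were
+  // skipped, a kernel lookup such as arrow::compute::CallFunction("add", ...)
+  // would be expected to fail with a KeyError status, since the non-core
+  // kernels are no longer registered automatically.]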
+  ARROW_RETURN_NOT_OK(arrow::compute::Initialize());
+
   std::shared_ptr<arrow::Table> table;

   table = arrow::Table::Make(schema, {some_nums, more_nums}, 5);
   // (Doc section: Create Tables)
diff --git a/cpp/examples/tutorial_examples/dataset_example.cc b/cpp/examples/tutorial_examples/dataset_example.cc
index a980fa54939..c32cf6ec4c6 100644
--- a/cpp/examples/tutorial_examples/dataset_example.cc
+++ b/cpp/examples/tutorial_examples/dataset_example.cc
@@ -19,6 +19,7 @@
 // (Doc section: Includes)
 #include <arrow/api.h>
+#include <arrow/compute/api.h>
 #include <arrow/dataset/api.h>
 // We use Parquet headers for setting up examples; they are not required for using
 // datasets.
@@ -75,6 +76,8 @@ arrow::Result<std::string> CreateExampleParquetDataset(
 }

 arrow::Status PrepareEnv() {
+  // Initialize the compute module to register the required kernels for Dataset.
+  ARROW_RETURN_NOT_OK(arrow::compute::Initialize());
   // Get our environment prepared for reading, by setting up some quick writing.
   ARROW_ASSIGN_OR_RAISE(auto src_table, CreateTable())
   std::shared_ptr<arrow::fs::FileSystem> setup_fs;
diff --git a/cpp/src/arrow/ArrowComputeConfig.cmake.in b/cpp/src/arrow/ArrowComputeConfig.cmake.in
new file mode 100644
index 00000000000..f38c776c8c8
--- /dev/null
+++ b/cpp/src/arrow/ArrowComputeConfig.cmake.in
@@ -0,0 +1,38 @@
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
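+#
+# [Editor's illustration, not part of the original patch: a consuming
+# project would typically use this package file roughly as follows,
+# mirroring the tutorial CMakeLists.txt change above:
+#
+#   find_package(ArrowCompute REQUIRED)
+#   target_link_libraries(my_app PRIVATE ArrowCompute::arrow_compute_shared)
+# ]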
+# +# This config sets the following variables in your project:: +# +# ArrowCompute_FOUND - true if Arrow Compute found on the system +# +# This config sets the following targets in your project:: +# +# ArrowCompute::arrow_compute_shared - for linked as shared library if shared library is built +# ArrowCompute::arrow_compute_static - for linked as static library if static library is built + +@PACKAGE_INIT@ + +include(CMakeFindDependencyMacro) +find_dependency(Arrow CONFIG) + +include("${CMAKE_CURRENT_LIST_DIR}/ArrowComputeTargets.cmake") + +arrow_keep_backward_compatibility(ArrowCompute arrow_compute) + +check_required_components(ArrowCompute) + +arrow_show_details(ArrowCompute ARROW_COMPUTE) diff --git a/cpp/src/arrow/CMakeLists.txt b/cpp/src/arrow/CMakeLists.txt index 917f1d02a55..47426ba604e 100644 --- a/cpp/src/arrow/CMakeLists.txt +++ b/cpp/src/arrow/CMakeLists.txt @@ -724,7 +724,6 @@ set(ARROW_COMPUTE_SRCS compute/registry.cc compute/kernels/chunked_internal.cc compute/kernels/codegen_internal.cc - compute/kernels/ree_util_internal.cc compute/kernels/scalar_cast_boolean.cc compute/kernels/scalar_cast_dictionary.cc compute/kernels/scalar_cast_extension.cc @@ -733,7 +732,6 @@ set(ARROW_COMPUTE_SRCS compute/kernels/scalar_cast_numeric.cc compute/kernels/scalar_cast_string.cc compute/kernels/scalar_cast_temporal.cc - compute/kernels/util_internal.cc compute/kernels/vector_hash.cc compute/kernels/vector_selection.cc compute/kernels/vector_selection_filter_internal.cc @@ -741,9 +739,18 @@ set(ARROW_COMPUTE_SRCS compute/kernels/vector_selection_take_internal.cc) if(ARROW_COMPUTE) + # If libarrow_compute.a is only built, "pkg-config --cflags --libs + # arrow-compute" outputs build flags for static linking not shared + # linking. ARROW_COMPUTE_PC_* except ARROW_COMPUTE_PC_*_PRIVATE are for + # the static linking case. 
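+  # [Editor's note, not part of the original patch: concretely, a
+  # static-only build appends the private flags (which carry
+  # "-DARROW_COMPUTE_STATIC") into the public Cflags of arrow-compute.pc,
+  # so `pkg-config --cflags arrow-compute` already emits the define.]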
+ if(NOT ARROW_BUILD_SHARED AND ARROW_BUILD_STATIC) + string(APPEND ARROW_COMPUTE_PC_CFLAGS "${ARROW_COMPUTE_PC_CFLAGS_PRIVATE}") + set(ARROW_COMPUTE_PC_CFLAGS_PRIVATE "") + endif() # Include the remaining kernels list(APPEND - ARROW_COMPUTE_SRCS + ARROW_COMPUTE_LIB_SRCS + compute/initialize.cc compute/kernels/aggregate_basic.cc compute/kernels/aggregate_mode.cc compute/kernels/aggregate_pivot.cc @@ -754,6 +761,7 @@ if(ARROW_COMPUTE) compute/kernels/hash_aggregate_numeric.cc compute/kernels/hash_aggregate_pivot.cc compute/kernels/pivot_internal.cc + compute/kernels/ree_util_internal.cc compute/kernels/scalar_arithmetic.cc compute/kernels/scalar_boolean.cc compute/kernels/scalar_compare.cc @@ -767,6 +775,7 @@ if(ARROW_COMPUTE) compute/kernels/scalar_temporal_binary.cc compute/kernels/scalar_temporal_unary.cc compute/kernels/scalar_validity.cc + compute/kernels/util_internal.cc compute/kernels/vector_array_sort.cc compute/kernels/vector_cumulative_ops.cc compute/kernels/vector_nested.cc @@ -789,40 +798,89 @@ if(ARROW_COMPUTE) compute/util.cc compute/util_internal.cc) - append_runtime_avx2_src(ARROW_COMPUTE_SRCS compute/kernels/aggregate_basic_avx2.cc) - append_runtime_avx512_src(ARROW_COMPUTE_SRCS compute/kernels/aggregate_basic_avx512.cc) - append_runtime_avx2_src(ARROW_COMPUTE_SRCS compute/key_hash_internal_avx2.cc) - append_runtime_avx2_bmi2_src(ARROW_COMPUTE_SRCS compute/key_map_internal_avx2.cc) - append_runtime_avx2_src(ARROW_COMPUTE_SRCS compute/row/compare_internal_avx2.cc) - append_runtime_avx2_src(ARROW_COMPUTE_SRCS compute/row/encode_internal_avx2.cc) - append_runtime_avx2_bmi2_src(ARROW_COMPUTE_SRCS compute/util_avx2.cc) - -endif() + append_runtime_avx2_src(ARROW_COMPUTE_LIB_SRCS compute/kernels/aggregate_basic_avx2.cc) + append_runtime_avx512_src(ARROW_COMPUTE_LIB_SRCS + compute/kernels/aggregate_basic_avx512.cc) + append_runtime_avx2_src(ARROW_COMPUTE_LIB_SRCS compute/key_hash_internal_avx2.cc) + append_runtime_avx2_bmi2_src(ARROW_COMPUTE_LIB_SRCS compute/key_map_internal_avx2.cc) + append_runtime_avx2_src(ARROW_COMPUTE_LIB_SRCS compute/row/compare_internal_avx2.cc) + append_runtime_avx2_src(ARROW_COMPUTE_LIB_SRCS compute/row/encode_internal_avx2.cc) + append_runtime_avx2_bmi2_src(ARROW_COMPUTE_LIB_SRCS compute/util_avx2.cc) + + set(ARROW_COMPUTE_SHARED_PRIVATE_LINK_LIBS) + set(ARROW_COMPUTE_SHARED_LINK_LIBS) + set(ARROW_COMPUTE_STATIC_LINK_LIBS) + set(ARROW_COMPUTE_STATIC_INSTALL_INTERFACE_LIBS) + set(ARROW_COMPUTE_SHARED_INSTALL_INTERFACE_LIBS) + + list(APPEND ARROW_COMPUTE_STATIC_INSTALL_INTERFACE_LIBS Arrow::arrow_static) + list(APPEND ARROW_COMPUTE_SHARED_INSTALL_INTERFACE_LIBS Arrow::arrow_shared) + list(APPEND ARROW_COMPUTE_STATIC_LINK_LIBS arrow_static) + list(APPEND ARROW_COMPUTE_SHARED_LINK_LIBS arrow_shared) + + if(ARROW_USE_BOOST) + list(APPEND ARROW_COMPUTE_STATIC_LINK_LIBS Boost::headers) + list(APPEND ARROW_COMPUTE_SHARED_PRIVATE_LINK_LIBS Boost::headers) + endif() + if(ARROW_USE_XSIMD) + list(APPEND ARROW_COMPUTE_STATIC_LINK_LIBS ${ARROW_XSIMD}) + list(APPEND ARROW_COMPUTE_SHARED_PRIVATE_LINK_LIBS ${ARROW_XSIMD}) + endif() + if(ARROW_WITH_OPENTELEMETRY) + list(APPEND ARROW_COMPUTE_STATIC_LINK_LIBS ${ARROW_OPENTELEMETRY_LIBS}) + list(APPEND ARROW_COMPUTE_SHARED_PRIVATE_LINK_LIBS ${ARROW_OPENTELEMETRY_LIBS}) + endif() + if(ARROW_WITH_RE2) + list(APPEND ARROW_COMPUTE_STATIC_LINK_LIBS re2::re2) + list(APPEND ARROW_COMPUTE_SHARED_PRIVATE_LINK_LIBS re2::re2) + endif() + if(ARROW_WITH_UTF8PROC) + list(APPEND ARROW_COMPUTE_STATIC_LINK_LIBS utf8proc::utf8proc) + 
list(APPEND ARROW_COMPUTE_SHARED_PRIVATE_LINK_LIBS utf8proc::utf8proc) + endif() -arrow_add_object_library(ARROW_COMPUTE ${ARROW_COMPUTE_SRCS}) -if(ARROW_USE_BOOST) - foreach(ARROW_COMPUTE_TARGET ${ARROW_COMPUTE_TARGETS}) - target_link_libraries(${ARROW_COMPUTE_TARGET} PRIVATE Boost::headers) + add_arrow_lib(arrow_compute + CMAKE_PACKAGE_NAME + ArrowCompute + PKG_CONFIG_NAME + arrow-compute + SHARED_LINK_LIBS + ${ARROW_COMPUTE_SHARED_LINK_LIBS} + SHARED_PRIVATE_LINK_LIBS + ${ARROW_COMPUTE_SHARED_PRIVATE_LINK_LIBS} + SHARED_INSTALL_INTERFACE_LIBS + ${ARROW_COMPUTE_SHARED_INSTALL_INTERFACE_LIBS} + STATIC_LINK_LIBS + ${ARROW_COMPUTE_STATIC_LINK_LIBS} + STATIC_INSTALL_INTERFACE_LIBS + ${ARROW_COMPUTE_STATIC_INSTALL_INTERFACE_LIBS} + OUTPUTS + ARROW_COMPUTE_LIBRARIES + SOURCES + ${ARROW_COMPUTE_LIB_SRCS} + SHARED_LINK_FLAGS + ${ARROW_VERSION_SCRIPT_FLAGS} # Defined in cpp/arrow/CMakeLists.txt + ) + foreach(LIB_TARGET ${ARROW_COMPUTE_LIBRARIES}) + target_compile_definitions(${LIB_TARGET} PRIVATE ARROW_COMPUTE_EXPORTING) endforeach() + + if(ARROW_BUILD_STATIC AND WIN32) + target_compile_definitions(arrow_compute_static PUBLIC ARROW_COMPUTE_STATIC) + endif() endif() + +arrow_add_object_library(ARROW_COMPUTE_CORE ${ARROW_COMPUTE_SRCS}) + if(ARROW_USE_XSIMD) - foreach(ARROW_COMPUTE_TARGET ${ARROW_COMPUTE_TARGETS}) - target_link_libraries(${ARROW_COMPUTE_TARGET} PRIVATE ${ARROW_XSIMD}) + foreach(ARROW_COMPUTE_CORE_TARGET ${ARROW_COMPUTE_CORE_TARGETS}) + target_link_libraries(${ARROW_COMPUTE_CORE_TARGET} PRIVATE ${ARROW_XSIMD}) endforeach() endif() if(ARROW_WITH_OPENTELEMETRY) - foreach(ARROW_COMPUTE_TARGET ${ARROW_COMPUTE_TARGETS}) - target_link_libraries(${ARROW_COMPUTE_TARGET} PRIVATE ${ARROW_OPENTELEMETRY_LIBS}) - endforeach() -endif() -if(ARROW_WITH_RE2) - foreach(ARROW_COMPUTE_TARGET ${ARROW_COMPUTE_TARGETS}) - target_link_libraries(${ARROW_COMPUTE_TARGET} PRIVATE re2::re2) - endforeach() -endif() -if(ARROW_WITH_UTF8PROC) - foreach(ARROW_COMPUTE_TARGET ${ARROW_COMPUTE_TARGETS}) - target_link_libraries(${ARROW_COMPUTE_TARGET} PRIVATE utf8proc::utf8proc) + foreach(ARROW_COMPUTE_CORE_TARGET ${ARROW_COMPUTE_CORE_TARGETS}) + target_link_libraries(${ARROW_COMPUTE_CORE_TARGET} + PRIVATE ${ARROW_OPENTELEMETRY_LIBS}) endforeach() endif() @@ -1031,7 +1089,7 @@ add_arrow_lib(arrow ${ARROW_SHARED_LINK_FLAGS} SHARED_PRIVATE_LINK_LIBS ${ARROW_ARRAY_TARGET_SHARED} - ${ARROW_COMPUTE_TARGET_SHARED} + ${ARROW_COMPUTE_CORE_TARGET_SHARED} ${ARROW_CSV_TARGET_SHARED} ${ARROW_FILESYSTEM_TARGET_SHARED} ${ARROW_INTEGRATION_TARGET_SHARED} @@ -1047,7 +1105,7 @@ add_arrow_lib(arrow ${ARROW_SYSTEM_LINK_LIBS} STATIC_LINK_LIBS ${ARROW_ARRAY_TARGET_STATIC} - ${ARROW_COMPUTE_TARGET_STATIC} + ${ARROW_COMPUTE_CORE_TARGET_STATIC} ${ARROW_CSV_TARGET_STATIC} ${ARROW_FILESYSTEM_TARGET_STATIC} ${ARROW_INTEGRATION_TARGET_STATIC} diff --git a/cpp/src/arrow/acero/ArrowAceroConfig.cmake.in b/cpp/src/arrow/acero/ArrowAceroConfig.cmake.in index fbb7a2bcafa..47488e8ac86 100644 --- a/cpp/src/arrow/acero/ArrowAceroConfig.cmake.in +++ b/cpp/src/arrow/acero/ArrowAceroConfig.cmake.in @@ -26,8 +26,12 @@ @PACKAGE_INIT@ +set(ARROW_ACERO_REQUIRED_DEPENDENCIES "@ARROW_ACERO_REQUIRED_DEPENDENCIES@") + include(CMakeFindDependencyMacro) -find_dependency(Arrow CONFIG) +foreach(dependency ${ARROW_ACERO_REQUIRED_DEPENDENCIES}) + find_dependency(${dependency} CONFIG) +endforeach() include("${CMAKE_CURRENT_LIST_DIR}/ArrowAceroTargets.cmake") diff --git a/cpp/src/arrow/acero/CMakeLists.txt b/cpp/src/arrow/acero/CMakeLists.txt index c3b08af84e0..37e00fd2566 
100644 --- a/cpp/src/arrow/acero/CMakeLists.txt +++ b/cpp/src/arrow/acero/CMakeLists.txt @@ -28,6 +28,9 @@ if(NOT ARROW_BUILD_SHARED AND ARROW_BUILD_STATIC) set(ARROW_ACERO_PC_CFLAGS_PRIVATE "") endif() +set(ARROW_ACERO_PKG_CONFIG_REQUIRES "arrow-compute") +set(ARROW_ACERO_REQUIRED_DEPENDENCIES Arrow ArrowCompute) + set(ARROW_ACERO_SRCS accumulation_queue.cc scalar_aggregate_node.cc @@ -73,10 +76,12 @@ if(ARROW_WITH_OPENTELEMETRY) list(APPEND ARROW_ACERO_STATIC_LINK_LIBS ${ARROW_OPENTELEMETRY_LIBS}) endif() -list(APPEND ARROW_ACERO_STATIC_INSTALL_INTERFACE_LIBS Arrow::arrow_static) -list(APPEND ARROW_ACERO_SHARED_INSTALL_INTERFACE_LIBS Arrow::arrow_shared) -list(APPEND ARROW_ACERO_STATIC_LINK_LIBS arrow_static) -list(APPEND ARROW_ACERO_SHARED_LINK_LIBS arrow_shared) +list(APPEND ARROW_ACERO_STATIC_INSTALL_INTERFACE_LIBS Arrow::arrow_static + ArrowCompute::arrow_compute_static) +list(APPEND ARROW_ACERO_SHARED_INSTALL_INTERFACE_LIBS Arrow::arrow_shared + ArrowCompute::arrow_compute_shared) +list(APPEND ARROW_ACERO_STATIC_LINK_LIBS arrow_static arrow_compute_static) +list(APPEND ARROW_ACERO_SHARED_LINK_LIBS arrow_shared arrow_compute_shared) add_arrow_lib(arrow_acero CMAKE_PACKAGE_NAME diff --git a/cpp/src/arrow/acero/arrow-acero.pc.in b/cpp/src/arrow/acero/arrow-acero.pc.in index ddddd52c4dd..94249cd78bd 100644 --- a/cpp/src/arrow/acero/arrow-acero.pc.in +++ b/cpp/src/arrow/acero/arrow-acero.pc.in @@ -22,7 +22,7 @@ libdir=@ARROW_PKG_CONFIG_LIBDIR@ Name: Apache Arrow Acero Engine Description: Apache Arrow's Acero Engine. Version: @ARROW_VERSION@ -Requires: arrow +Requires: @ARROW_ACERO_PKG_CONFIG_REQUIRES@ Libs: -L${libdir} -larrow_acero Cflags:@ARROW_ACERO_PC_CFLAGS@ Cflags.private:@ARROW_ACERO_PC_CFLAGS_PRIVATE@ diff --git a/cpp/src/arrow/arrow-compute.pc.in b/cpp/src/arrow/arrow-compute.pc.in new file mode 100644 index 00000000000..2da0986d612 --- /dev/null +++ b/cpp/src/arrow/arrow-compute.pc.in @@ -0,0 +1,28 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +prefix=@CMAKE_INSTALL_PREFIX@ +includedir=@ARROW_PKG_CONFIG_INCLUDEDIR@ +libdir=@ARROW_PKG_CONFIG_LIBDIR@ + +Name: Apache Arrow Compute Kernels +Description: Apache Arrow's Compute Kernels. +Version: @ARROW_VERSION@ +Requires: arrow +Libs: -L${libdir} -larrow_compute +Cflags:@ARROW_COMPUTE_PC_CFLAGS@ +Cflags.private:@ARROW_COMPUTE_PC_CFLAGS_PRIVATE@ diff --git a/cpp/src/arrow/c/CMakeLists.txt b/cpp/src/arrow/c/CMakeLists.txt index 81a81cd3f11..a7f722aacc9 100644 --- a/cpp/src/arrow/c/CMakeLists.txt +++ b/cpp/src/arrow/c/CMakeLists.txt @@ -15,7 +15,28 @@ # specific language governing permissions and limitations # under the License. 
-add_arrow_test(bridge_test PREFIX "arrow-c") +# TODO(GH-37221): Remove compute dependency for REE requirements on bridge_test +set(ARROW_TEST_LINK_LIBS "") + +if(ARROW_TEST_LINKAGE STREQUAL "static") + list(APPEND ARROW_TEST_LINK_LIBS ${ARROW_TEST_STATIC_LINK_LIBS}) +else() + list(APPEND ARROW_TEST_LINK_LIBS ${ARROW_TEST_SHARED_LINK_LIBS}) +endif() + +if(ARROW_COMPUTE) + if(ARROW_TEST_LINKAGE STREQUAL "static") + list(APPEND ARROW_TEST_LINK_LIBS arrow_compute_static arrow_compute_testing) + else() + list(APPEND ARROW_TEST_LINK_LIBS arrow_compute_shared arrow_compute_testing) + endif() +endif() + +add_arrow_test(bridge_test + PREFIX + "arrow-c" + STATIC_LINK_LIBS + ${ARROW_TEST_LINK_LIBS}) add_arrow_test(dlpack_test) add_arrow_benchmark(bridge_benchmark) diff --git a/cpp/src/arrow/compute/CMakeLists.txt b/cpp/src/arrow/compute/CMakeLists.txt index 6deb2cbad8c..4255f2971c1 100644 --- a/cpp/src/arrow/compute/CMakeLists.txt +++ b/cpp/src/arrow/compute/CMakeLists.txt @@ -27,13 +27,30 @@ endif() # # Unit tests # +if(ARROW_TEST_LINKAGE STREQUAL "static") + set(ARROW_COMPUTE_TEST_LINK_LIBS arrow_compute_static ${ARROW_TEST_STATIC_LINK_LIBS}) +else() + set(ARROW_COMPUTE_TEST_LINK_LIBS arrow_compute_shared ${ARROW_TEST_SHARED_LINK_LIBS}) +endif() -# Define arrow_compute_testing object library for common test files +# Define arrow_compute_core_testing object library for common test files requiring +# only core compute. No extra kernels are required. if(ARROW_TESTING) - add_library(arrow_compute_testing OBJECT test_util_internal.cc) + add_library(arrow_compute_core_testing OBJECT test_util_internal.cc) # Even though this is still just an object library we still need to "link" our # dependencies so that include paths are configured correctly - target_link_libraries(arrow_compute_testing PUBLIC ${ARROW_GTEST_GMOCK}) + target_link_libraries(arrow_compute_core_testing PUBLIC ${ARROW_GTEST_GMOCK}) +endif() + +# Define arrow_compute_testing object library for test files requiring extra kernels. 
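+# (Unlike arrow_compute_core_testing above, which needs only the always-built +# core kernels, this target also compiles test_env.cc so that the full kernel +# set is registered via arrow::compute::Initialize() before any test runs.)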
+if(ARROW_TESTING AND ARROW_COMPUTE) + set(ARROW_COMPUTE_TESTING_SRCS test_env.cc) + add_library(arrow_compute_testing OBJECT ${ARROW_COMPUTE_TESTING_SRCS}) + # Even though this is still just an object library we still need to "link" + # arrow_compute_core_testing so that is also included correctly + target_link_libraries(arrow_compute_testing + PUBLIC $ + PUBLIC ${ARROW_GTEST_GTEST_MAIN}) endif() set(ARROW_COMPUTE_TEST_PREFIX "arrow-compute") @@ -86,6 +103,8 @@ function(ADD_ARROW_COMPUTE_TEST REL_TEST_NAME) ${PREFIX} LABELS ${LABELS} + STATIC_LINK_LIBS + ${ARROW_COMPUTE_TEST_LINK_LIBS} ${ARG_UNPARSED_ARGUMENTS}) endfunction() @@ -97,7 +116,7 @@ add_arrow_test(internals_test kernel_test.cc registry_test.cc EXTRA_LINK_LIBS - arrow_compute_testing) + arrow_compute_core_testing) add_arrow_compute_test(expression_test SOURCES diff --git a/cpp/src/arrow/compute/api.h b/cpp/src/arrow/compute/api.h index b701d992869..343e30643cf 100644 --- a/cpp/src/arrow/compute/api.h +++ b/cpp/src/arrow/compute/api.h @@ -34,6 +34,7 @@ #include "arrow/compute/cast.h" // IWYU pragma: export #include "arrow/compute/function.h" // IWYU pragma: export #include "arrow/compute/function_options.h" // IWYU pragma: export +#include "arrow/compute/initialize.h" // IWYU pragma: export #include "arrow/compute/kernel.h" // IWYU pragma: export #include "arrow/compute/registry.h" // IWYU pragma: export #include "arrow/datum.h" // IWYU pragma: export diff --git a/cpp/src/arrow/compute/initialize.cc b/cpp/src/arrow/compute/initialize.cc new file mode 100644 index 00000000000..d126ac951ff --- /dev/null +++ b/cpp/src/arrow/compute/initialize.cc @@ -0,0 +1,80 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
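+// The registrations below were previously performed inside +// CreateBuiltInRegistry() in registry.cc behind #ifdef ARROW_COMPUTE (see the +// removal further down); compiled into libarrow_compute, they now run on +// demand, exactly once, via Initialize().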
+#include "arrow/compute/initialize.h" + +#include "arrow/compute/registry_internal.h" +#include "arrow/compute/type_fwd.h" +#include "arrow/status.h" + +namespace arrow::compute { +namespace { + +Status RegisterComputeKernels() { + auto registry = GetFunctionRegistry(); + + // Register additional kernels on libarrow_compute + // Scalar functions + internal::RegisterScalarArithmetic(registry); + internal::RegisterScalarBoolean(registry); + internal::RegisterScalarComparison(registry); + internal::RegisterScalarIfElse(registry); + internal::RegisterScalarNested(registry); + internal::RegisterScalarRandom(registry); // Nullary + internal::RegisterScalarRoundArithmetic(registry); + internal::RegisterScalarSetLookup(registry); + internal::RegisterScalarStringAscii(registry); + internal::RegisterScalarStringUtf8(registry); + internal::RegisterScalarTemporalBinary(registry); + internal::RegisterScalarTemporalUnary(registry); + internal::RegisterScalarValidity(registry); + + // Vector functions + internal::RegisterVectorArraySort(registry); + internal::RegisterVectorCumulativeSum(registry); + internal::RegisterVectorNested(registry); + internal::RegisterVectorRank(registry); + internal::RegisterVectorReplace(registry); + internal::RegisterVectorSelectK(registry); + internal::RegisterVectorSort(registry); + internal::RegisterVectorRunEndEncode(registry); + internal::RegisterVectorRunEndDecode(registry); + internal::RegisterVectorPairwise(registry); + internal::RegisterVectorStatistics(registry); + internal::RegisterVectorSwizzle(registry); + + // Aggregate functions + internal::RegisterHashAggregateBasic(registry); + internal::RegisterHashAggregateNumeric(registry); + internal::RegisterHashAggregatePivot(registry); + internal::RegisterScalarAggregateBasic(registry); + internal::RegisterScalarAggregateMode(registry); + internal::RegisterScalarAggregatePivot(registry); + internal::RegisterScalarAggregateQuantile(registry); + internal::RegisterScalarAggregateTDigest(registry); + internal::RegisterScalarAggregateVariance(registry); + + return Status::OK(); +} + +} // namespace + +Status Initialize() { + static auto st = RegisterComputeKernels(); + return st; +} + +} // namespace arrow::compute diff --git a/cpp/src/arrow/compute/initialize.h b/cpp/src/arrow/compute/initialize.h new file mode 100644 index 00000000000..db5e231325b --- /dev/null +++ b/cpp/src/arrow/compute/initialize.h @@ -0,0 +1,32 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#pragma once + +#include "arrow/compute/visibility.h" +#include "arrow/status.h" + +namespace arrow::compute { + +/// \brief Initialize the compute module. +/// +/// Register the compute kernel functions to be available on the +/// global FunctionRegistry. 
+/// This function will only be available if ARROW_COMPUTE is enabled. +ARROW_COMPUTE_EXPORT Status Initialize(); + +} // namespace arrow::compute diff --git a/cpp/src/arrow/compute/kernels/CMakeLists.txt b/cpp/src/arrow/compute/kernels/CMakeLists.txt index 81b7adeb4aa..929cca8f5a4 100644 --- a/cpp/src/arrow/compute/kernels/CMakeLists.txt +++ b/cpp/src/arrow/compute/kernels/CMakeLists.txt @@ -15,6 +15,8 @@ # specific language governing permissions and limitations # under the License. +arrow_install_all_headers("arrow/compute/kernels") + # ---------------------------------------------------------------------- # Tests that don't require the full kernel library @@ -32,7 +34,7 @@ add_arrow_test(scalar_cast_test scalar_cast_test.cc EXTRA_LINK_LIBS arrow_compute_kernels_testing - arrow_compute_testing) + arrow_compute_core_testing) # ---------------------------------------------------------------------- # Scalar kernels diff --git a/cpp/src/arrow/compute/kernels/aggregate_basic.cc b/cpp/src/arrow/compute/kernels/aggregate_basic.cc index 68b1ac7c03c..ee2c615bbfb 100644 --- a/cpp/src/arrow/compute/kernels/aggregate_basic.cc +++ b/cpp/src/arrow/compute/kernels/aggregate_basic.cc @@ -18,6 +18,7 @@ #include "arrow/compute/api_aggregate.h" #include "arrow/compute/kernels/aggregate_basic_internal.h" #include "arrow/compute/kernels/aggregate_internal.h" +#include "arrow/compute/kernels/codegen_internal.h" #include "arrow/compute/kernels/common_internal.h" #include "arrow/compute/kernels/util_internal.h" #include "arrow/util/cpu_info.h" diff --git a/cpp/src/arrow/compute/kernels/chunked_internal.h b/cpp/src/arrow/compute/kernels/chunked_internal.h index 5bc8233016f..330bd185f25 100644 --- a/cpp/src/arrow/compute/kernels/chunked_internal.h +++ b/cpp/src/arrow/compute/kernels/chunked_internal.h @@ -27,6 +27,7 @@ #include "arrow/chunk_resolver.h" #include "arrow/compute/kernels/codegen_internal.h" #include "arrow/util/span.h" +#include "arrow/util/visibility.h" namespace arrow::compute::internal { @@ -120,11 +121,11 @@ class ChunkedArrayResolver { } }; -std::vector GetArrayPointers(const ArrayVector& arrays); +ARROW_EXPORT std::vector GetArrayPointers(const ArrayVector& arrays); // A class that turns logical (linear) indices into physical (chunked) indices, // and vice-versa. 
-class ChunkedIndexMapper { +class ARROW_EXPORT ChunkedIndexMapper { public: ChunkedIndexMapper(const std::vector& chunks, uint64_t* indices_begin, uint64_t* indices_end) diff --git a/cpp/src/arrow/compute/kernels/codegen_internal.h b/cpp/src/arrow/compute/kernels/codegen_internal.h index 94677de9440..289ba25f0b7 100644 --- a/cpp/src/arrow/compute/kernels/codegen_internal.h +++ b/cpp/src/arrow/compute/kernels/codegen_internal.h @@ -474,10 +474,12 @@ static void VisitTwoArrayValuesInline(const ArraySpan& arr0, const ArraySpan& ar // ---------------------------------------------------------------------- // Reusable type resolvers -Result FirstType(KernelContext*, const std::vector& types); -Result LastType(KernelContext*, const std::vector& types); -Result ListValuesType(KernelContext* ctx, - const std::vector& types); +ARROW_EXPORT Result FirstType(KernelContext*, + const std::vector& types); +ARROW_EXPORT Result LastType(KernelContext*, + const std::vector& types); +ARROW_EXPORT Result ListValuesType(KernelContext* ctx, + const std::vector& types); // ---------------------------------------------------------------------- // Helpers for iterating over common DataType instances for adding kernels to diff --git a/cpp/src/arrow/compute/kernels/hash_aggregate.cc b/cpp/src/arrow/compute/kernels/hash_aggregate.cc index 18a5590b2e3..0e3e359bde1 100644 --- a/cpp/src/arrow/compute/kernels/hash_aggregate.cc +++ b/cpp/src/arrow/compute/kernels/hash_aggregate.cc @@ -26,7 +26,9 @@ #include "arrow/array/concatenate.h" #include "arrow/compute/api_aggregate.h" #include "arrow/compute/api_vector.h" +#include "arrow/compute/kernel.h" #include "arrow/compute/kernels/aggregate_internal.h" +#include "arrow/compute/kernels/codegen_internal.h" #include "arrow/compute/kernels/common_internal.h" #include "arrow/compute/kernels/hash_aggregate_internal.h" #include "arrow/compute/kernels/util_internal.h" diff --git a/cpp/src/arrow/compute/kernels/scalar_temporal_unary.cc b/cpp/src/arrow/compute/kernels/scalar_temporal_unary.cc index ed4f0c3c8ea..2864234f8a5 100644 --- a/cpp/src/arrow/compute/kernels/scalar_temporal_unary.cc +++ b/cpp/src/arrow/compute/kernels/scalar_temporal_unary.cc @@ -21,6 +21,7 @@ #include "arrow/builder.h" #include "arrow/compute/api_scalar.h" +#include "arrow/compute/kernels/codegen_internal.h" #include "arrow/compute/kernels/common_internal.h" #include "arrow/compute/kernels/temporal_internal.h" #include "arrow/util/checked_cast.h" diff --git a/cpp/src/arrow/compute/kernels/vector_hash.cc b/cpp/src/arrow/compute/kernels/vector_hash.cc index bd8cbdb0430..c969f330b70 100644 --- a/cpp/src/arrow/compute/kernels/vector_hash.cc +++ b/cpp/src/arrow/compute/kernels/vector_hash.cc @@ -29,6 +29,7 @@ #include "arrow/buffer.h" #include "arrow/compute/api_vector.h" #include "arrow/compute/cast.h" +#include "arrow/compute/kernels/codegen_internal.h" #include "arrow/compute/kernels/common_internal.h" #include "arrow/result.h" #include "arrow/util/hashing.h" diff --git a/cpp/src/arrow/compute/kernels/vector_replace.cc b/cpp/src/arrow/compute/kernels/vector_replace.cc index 7f6713f74c1..945f48a64ca 100644 --- a/cpp/src/arrow/compute/kernels/vector_replace.cc +++ b/cpp/src/arrow/compute/kernels/vector_replace.cc @@ -16,6 +16,7 @@ // under the License. 
#include "arrow/compute/api_scalar.h" +#include "arrow/compute/kernels/codegen_internal.h" #include "arrow/compute/kernels/common_internal.h" #include "arrow/compute/kernels/copy_data_internal.h" #include "arrow/compute/kernels/util_internal.h" diff --git a/cpp/src/arrow/compute/key_hash_internal.h b/cpp/src/arrow/compute/key_hash_internal.h index 769f3b2145e..d141603ce0f 100644 --- a/cpp/src/arrow/compute/key_hash_internal.h +++ b/cpp/src/arrow/compute/key_hash_internal.h @@ -21,6 +21,7 @@ #include "arrow/compute/light_array_internal.h" #include "arrow/compute/util.h" +#include "arrow/compute/visibility.h" #include "arrow/util/simd.h" namespace arrow { @@ -34,7 +35,7 @@ enum class BloomFilterBuildStrategy; // Implementations are based on xxh3 32-bit algorithm description from: // https://github.com/Cyan4973/xxHash/blob/dev/doc/xxhash_spec.md // -class ARROW_EXPORT Hashing32 { +class ARROW_COMPUTE_EXPORT Hashing32 { friend class TestVectorHash; template friend void TestBloomLargeHashHelper(int64_t, int64_t, const std::vector&, @@ -157,7 +158,7 @@ class ARROW_EXPORT Hashing32 { #endif }; -class ARROW_EXPORT Hashing64 { +class ARROW_COMPUTE_EXPORT Hashing64 { friend class TestVectorHash; template friend void TestBloomLargeHashHelper(int64_t, int64_t, const std::vector&, diff --git a/cpp/src/arrow/compute/key_map_internal.h b/cpp/src/arrow/compute/key_map_internal.h index c558ef5c2a6..27583e82ade 100644 --- a/cpp/src/arrow/compute/key_map_internal.h +++ b/cpp/src/arrow/compute/key_map_internal.h @@ -22,6 +22,7 @@ #include "arrow/compute/util.h" #include "arrow/compute/util_internal.h" +#include "arrow/compute/visibility.h" #include "arrow/result.h" #include "arrow/status.h" #include "arrow/type_fwd.h" @@ -37,7 +38,7 @@ namespace compute { // slots, stamps) and operations provided by this class is given in the document: // arrow/acero/doc/key_map.md. // -class ARROW_EXPORT SwissTable { +class ARROW_COMPUTE_EXPORT SwissTable { friend class SwissTableMerge; public: diff --git a/cpp/src/arrow/compute/light_array_internal.h b/cpp/src/arrow/compute/light_array_internal.h index cf7b95cbe74..ecd7e758ecd 100644 --- a/cpp/src/arrow/compute/light_array_internal.h +++ b/cpp/src/arrow/compute/light_array_internal.h @@ -23,6 +23,7 @@ #include "arrow/compute/exec.h" #include "arrow/compute/util.h" #include "arrow/compute/util_internal.h" +#include "arrow/compute/visibility.h" #include "arrow/type.h" #include "arrow/util/cpu_info.h" #include "arrow/util/logging.h" @@ -53,7 +54,7 @@ struct LightContext { /// and no children. /// /// This metadata object is a zero-allocation analogue of arrow::DataType -struct ARROW_EXPORT KeyColumnMetadata { +struct ARROW_COMPUTE_EXPORT KeyColumnMetadata { KeyColumnMetadata() = default; KeyColumnMetadata(bool is_fixed_length_in, uint32_t fixed_length_in, bool is_null_type_in = false) @@ -81,7 +82,7 @@ struct ARROW_EXPORT KeyColumnMetadata { /// A "key" column is a non-nested, non-union column \see KeyColumnMetadata /// /// This metadata object is a zero-allocation analogue of arrow::ArrayData -class ARROW_EXPORT KeyColumnArray { +class ARROW_COMPUTE_EXPORT KeyColumnArray { public: /// \brief Create an uninitialized KeyColumnArray KeyColumnArray() = default; @@ -218,7 +219,7 @@ class ARROW_EXPORT KeyColumnArray { /// /// This should only be called on "key" columns. Calling this with /// a non-key column will return Status::TypeError. 
-ARROW_EXPORT Result<KeyColumnMetadata> ColumnMetadataFromDataType( +ARROW_COMPUTE_EXPORT Result<KeyColumnMetadata> ColumnMetadataFromDataType( const std::shared_ptr<DataType>& type); /// \brief Create KeyColumnArray from ArrayData @@ -228,7 +229,7 @@ ARROW_EXPORT Result<KeyColumnMetadata> ColumnMetadataFromDataType( /// /// The caller should ensure this is only called on "key" columns. /// \see ColumnMetadataFromDataType for details -ARROW_EXPORT Result<KeyColumnArray> ColumnArrayFromArrayData( +ARROW_COMPUTE_EXPORT Result<KeyColumnArray> ColumnArrayFromArrayData( const std::shared_ptr<ArrayData>& array_data, int64_t start_row, int64_t num_rows); /// \brief Create KeyColumnArray from ArrayData and KeyColumnMetadata @@ -238,7 +239,7 @@ ARROW_EXPORT Result<KeyColumnArray> ColumnArrayFromArrayData( /// /// The caller should ensure this is only called on "key" columns. /// \see ColumnMetadataFromDataType for details -ARROW_EXPORT KeyColumnArray ColumnArrayFromArrayDataAndMetadata( +ARROW_COMPUTE_EXPORT KeyColumnArray ColumnArrayFromArrayDataAndMetadata( const std::shared_ptr<ArrayData>& array_data, const KeyColumnMetadata& metadata, int64_t start_row, int64_t num_rows); @@ -248,7 +249,7 @@ ARROW_EXPORT KeyColumnArray ColumnArrayFromArrayDataAndMetadata( /// /// All columns in `batch` must be eligible "key" columns and have an array shape /// \see ColumnMetadataFromDataType for more details -ARROW_EXPORT Status ColumnMetadatasFromExecBatch( +ARROW_COMPUTE_EXPORT Status ColumnMetadatasFromExecBatch( const ExecBatch& batch, std::vector<KeyColumnMetadata>* column_metadatas); /// \brief Create KeyColumnArray instances from a slice of an ExecBatch /// @@ -257,9 +258,9 @@ ARROW_EXPORT Status ColumnMetadatasFromExecBatch( /// /// All columns in `batch` must be eligible "key" columns and have an array shape /// \see ColumnArrayFromArrayData for more details -ARROW_EXPORT Status ColumnArraysFromExecBatch(const ExecBatch& batch, int64_t start_row, - int64_t num_rows, - std::vector<KeyColumnArray>* column_arrays); +ARROW_COMPUTE_EXPORT Status +ColumnArraysFromExecBatch(const ExecBatch& batch, int64_t start_row, int64_t num_rows, + std::vector<KeyColumnArray>* column_arrays); /// \brief Create KeyColumnArray instances from an ExecBatch /// @@ -267,8 +268,8 @@ ARROW_EXPORT Status ColumnArraysFromExecBatch(const ExecBatch& batch, int64_t st /// /// All columns in `batch` must be eligible "key" columns and have an array shape /// \see ColumnArrayFromArrayData for more details -ARROW_EXPORT Status ColumnArraysFromExecBatch(const ExecBatch& batch, - std::vector<KeyColumnArray>* column_arrays); +ARROW_COMPUTE_EXPORT Status ColumnArraysFromExecBatch( + const ExecBatch& batch, std::vector<KeyColumnArray>* column_arrays); /// A lightweight resizable array for "key" columns /// @@ -276,7 +277,7 @@ ARROW_EXPORT Status ColumnArraysFromExecBatch(const ExecBatch& batch, /// /// Resizing is handled by arrow::ResizableBuffer and a doubling approach is /// used so that resizes will always grow up to the next power of 2 -class ARROW_EXPORT ResizableArrayData { +class ARROW_COMPUTE_EXPORT ResizableArrayData { public: /// \brief Create an uninitialized instance /// @@ -372,7 +373,7 @@ class ARROW_EXPORT ResizableArrayData { /// \brief A builder to concatenate batches of data into a larger batch /// /// Will only store num_rows_max() rows -class ARROW_EXPORT ExecBatchBuilder { +class ARROW_COMPUTE_EXPORT ExecBatchBuilder { public: /// \brief Add rows from `source` into `target` column /// diff --git a/cpp/src/arrow/compute/registry.cc b/cpp/src/arrow/compute/registry.cc index b4f1c0f2f97..37e9d6c930a 100644 --- a/cpp/src/arrow/compute/registry.cc +++ b/cpp/src/arrow/compute/registry.cc @@ -292,50 +292,6 @@ static std::unique_ptr<FunctionRegistry>
CreateBuiltInRegistry() { RegisterVectorOptions(registry.get()); RegisterAggregateOptions(registry.get()); -#ifdef ARROW_COMPUTE - // Register additional kernels - - // Scalar functions - RegisterScalarArithmetic(registry.get()); - RegisterScalarBoolean(registry.get()); - RegisterScalarComparison(registry.get()); - RegisterScalarIfElse(registry.get()); - RegisterScalarNested(registry.get()); - RegisterScalarRandom(registry.get()); // Nullary - RegisterScalarRoundArithmetic(registry.get()); - RegisterScalarSetLookup(registry.get()); - RegisterScalarStringAscii(registry.get()); - RegisterScalarStringUtf8(registry.get()); - RegisterScalarTemporalBinary(registry.get()); - RegisterScalarTemporalUnary(registry.get()); - RegisterScalarValidity(registry.get()); - - // Vector functions - RegisterVectorArraySort(registry.get()); - RegisterVectorCumulativeSum(registry.get()); - RegisterVectorNested(registry.get()); - RegisterVectorRank(registry.get()); - RegisterVectorReplace(registry.get()); - RegisterVectorSelectK(registry.get()); - RegisterVectorSort(registry.get()); - RegisterVectorRunEndEncode(registry.get()); - RegisterVectorRunEndDecode(registry.get()); - RegisterVectorPairwise(registry.get()); - RegisterVectorStatistics(registry.get()); - RegisterVectorSwizzle(registry.get()); - - // Aggregate functions - RegisterHashAggregateBasic(registry.get()); - RegisterHashAggregateNumeric(registry.get()); - RegisterHashAggregatePivot(registry.get()); - RegisterScalarAggregateBasic(registry.get()); - RegisterScalarAggregateMode(registry.get()); - RegisterScalarAggregatePivot(registry.get()); - RegisterScalarAggregateQuantile(registry.get()); - RegisterScalarAggregateTDigest(registry.get()); - RegisterScalarAggregateVariance(registry.get()); -#endif - return registry; } diff --git a/cpp/src/arrow/compute/row/CMakeLists.txt b/cpp/src/arrow/compute/row/CMakeLists.txt index 747fd0a92d9..542dc314806 100644 --- a/cpp/src/arrow/compute/row/CMakeLists.txt +++ b/cpp/src/arrow/compute/row/CMakeLists.txt @@ -20,6 +20,11 @@ arrow_install_all_headers("arrow/compute/row") -if(ARROW_COMPUTE) +if(ARROW_BUILD_BENCHMARKS AND ARROW_COMPUTE) add_arrow_benchmark(grouper_benchmark PREFIX "arrow-compute") + if(ARROW_BUILD_STATIC) + target_link_libraries(arrow-compute-grouper-benchmark PUBLIC arrow_compute_static) + else() + target_link_libraries(arrow-compute-grouper-benchmark PUBLIC arrow_compute_shared) + endif() endif() diff --git a/cpp/src/arrow/compute/row/compare_internal.h b/cpp/src/arrow/compute/row/compare_internal.h index 29d7f859e59..264ef69b39f 100644 --- a/cpp/src/arrow/compute/row/compare_internal.h +++ b/cpp/src/arrow/compute/row/compare_internal.h @@ -23,6 +23,7 @@ #include "arrow/compute/row/encode_internal.h" #include "arrow/compute/row/row_internal.h" #include "arrow/compute/util.h" +#include "arrow/compute/visibility.h" #include "arrow/memory_pool.h" #include "arrow/result.h" #include "arrow/status.h" @@ -30,7 +31,7 @@ namespace arrow { namespace compute { -class ARROW_EXPORT KeyCompare { +class ARROW_COMPUTE_EXPORT KeyCompare { public: // Clarify the max temp stack usage for CompareColumnsToRows, which might be necessary // for the caller to be aware of (possibly at compile time) to reserve enough stack size diff --git a/cpp/src/arrow/compute/row/encode_internal.h b/cpp/src/arrow/compute/row/encode_internal.h index 5ad82e0c8e7..6bfb87e6f84 100644 --- a/cpp/src/arrow/compute/row/encode_internal.h +++ b/cpp/src/arrow/compute/row/encode_internal.h @@ -26,6 +26,7 @@ #include 
"arrow/compute/light_array_internal.h" #include "arrow/compute/row/row_internal.h" #include "arrow/compute/util.h" +#include "arrow/compute/visibility.h" #include "arrow/memory_pool.h" #include "arrow/result.h" #include "arrow/status.h" @@ -44,7 +45,7 @@ namespace compute { /// be accessed together, as in the case of hash table key. /// /// Does not support nested types -class ARROW_EXPORT RowTableEncoder { +class ARROW_COMPUTE_EXPORT RowTableEncoder { public: void Init(const std::vector& cols, int row_alignment, int string_alignment); diff --git a/cpp/src/arrow/compute/row/grouper.h b/cpp/src/arrow/compute/row/grouper.h index 7554e5ef159..9424559385b 100644 --- a/cpp/src/arrow/compute/row/grouper.h +++ b/cpp/src/arrow/compute/row/grouper.h @@ -21,6 +21,7 @@ #include #include "arrow/compute/kernel.h" +#include "arrow/compute/visibility.h" #include "arrow/datum.h" #include "arrow/result.h" #include "arrow/util/visibility.h" @@ -36,7 +37,7 @@ namespace compute { /// same segment key within a given batch. When a segment group span cross batches, it /// will have multiple segments. A segment never spans cross batches. The segment data /// structure only makes sense when used along with a exec batch. -struct ARROW_EXPORT Segment { +struct ARROW_COMPUTE_EXPORT Segment { /// \brief the offset into the batch where the segment starts int64_t offset; /// \brief the length of the segment @@ -74,7 +75,7 @@ inline bool operator!=(const Segment& segment1, const Segment& segment2) { /// /// If the next call to the segmenter starts with `A A` then that segment would set the /// "extends" flag, which indicates whether the segment continues the last open batch. -class ARROW_EXPORT RowSegmenter { +class ARROW_COMPUTE_EXPORT RowSegmenter { public: virtual ~RowSegmenter() = default; @@ -101,7 +102,7 @@ class ARROW_EXPORT RowSegmenter { }; /// Consumes batches of keys and yields batches of the group ids. 
-class ARROW_EXPORT Grouper { +class ARROW_COMPUTE_EXPORT Grouper { public: virtual ~Grouper() = default; diff --git a/cpp/src/arrow/compute/row/grouper_internal.h b/cpp/src/arrow/compute/row/grouper_internal.h index eb3dfe8ba16..bce9ea1d3d5 100644 --- a/cpp/src/arrow/compute/row/grouper_internal.h +++ b/cpp/src/arrow/compute/row/grouper_internal.h @@ -20,7 +20,7 @@ namespace arrow { namespace compute { -ARROW_EXPORT Result> MakeAnyKeysSegmenter( +ARROW_COMPUTE_EXPORT Result> MakeAnyKeysSegmenter( const std::vector& key_types, ExecContext* ctx); } // namespace compute diff --git a/cpp/src/arrow/compute/row/row_encoder_internal.h b/cpp/src/arrow/compute/row/row_encoder_internal.h index 2cb47d4a600..9337e78bf8a 100644 --- a/cpp/src/arrow/compute/row/row_encoder_internal.h +++ b/cpp/src/arrow/compute/row/row_encoder_internal.h @@ -20,6 +20,7 @@ #include #include "arrow/compute/kernels/codegen_internal.h" +#include "arrow/compute/visibility.h" #include "arrow/visit_data_inline.h" namespace arrow { @@ -29,7 +30,7 @@ using internal::checked_cast; namespace compute { namespace internal { -struct ARROW_EXPORT KeyEncoder { +struct ARROW_COMPUTE_EXPORT KeyEncoder { // the first byte of an encoded key is used to indicate nullity static constexpr bool kExtraByteForNull = true; @@ -85,7 +86,7 @@ struct ARROW_EXPORT KeyEncoder { } }; -struct ARROW_EXPORT BooleanKeyEncoder : KeyEncoder { +struct ARROW_COMPUTE_EXPORT BooleanKeyEncoder : KeyEncoder { static constexpr int kByteWidth = 1; void AddLength(const ExecValue& data, int64_t batch_length, int32_t* lengths) override; @@ -101,7 +102,7 @@ struct ARROW_EXPORT BooleanKeyEncoder : KeyEncoder { MemoryPool* pool) override; }; -struct ARROW_EXPORT FixedWidthKeyEncoder : KeyEncoder { +struct ARROW_COMPUTE_EXPORT FixedWidthKeyEncoder : KeyEncoder { explicit FixedWidthKeyEncoder(std::shared_ptr type) : type_(std::move(type)), byte_width_(checked_cast(*type_).bit_width() / 8) {} @@ -122,7 +123,7 @@ struct ARROW_EXPORT FixedWidthKeyEncoder : KeyEncoder { const int byte_width_; }; -struct ARROW_EXPORT DictionaryKeyEncoder : FixedWidthKeyEncoder { +struct ARROW_COMPUTE_EXPORT DictionaryKeyEncoder : FixedWidthKeyEncoder { DictionaryKeyEncoder(std::shared_ptr type, MemoryPool* pool) : FixedWidthKeyEncoder(std::move(type)), pool_(pool) {} @@ -251,7 +252,7 @@ struct VarLengthKeyEncoder : KeyEncoder { std::shared_ptr type_; }; -struct ARROW_EXPORT NullKeyEncoder : KeyEncoder { +struct ARROW_COMPUTE_EXPORT NullKeyEncoder : KeyEncoder { void AddLength(const ExecValue&, int64_t batch_length, int32_t* lengths) override {} void AddLengthNull(int32_t* length) override {} @@ -331,7 +332,7 @@ struct ARROW_EXPORT NullKeyEncoder : KeyEncoder { /// # Row Encoding /// /// The row format is the concatenation of the encodings of each column. 
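+/// (With kExtraByteForNull set, each column's contribution starts with a +/// single validity byte, as noted on KeyEncoder above, followed by the value +/// bytes written by the matching encoder subclass.)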
-class ARROW_EXPORT RowEncoder { +class ARROW_COMPUTE_EXPORT RowEncoder { public: static constexpr int kRowIdForNulls() { return -1; } diff --git a/cpp/src/arrow/compute/row/row_internal.h b/cpp/src/arrow/compute/row/row_internal.h index 0919773a228..219fcbc51f4 100644 --- a/cpp/src/arrow/compute/row/row_internal.h +++ b/cpp/src/arrow/compute/row/row_internal.h @@ -21,6 +21,7 @@ #include "arrow/buffer.h" #include "arrow/compute/light_array_internal.h" +#include "arrow/compute/visibility.h" #include "arrow/memory_pool.h" #include "arrow/status.h" #include "arrow/util/logging.h" @@ -29,7 +30,7 @@ namespace arrow { namespace compute { /// Description of the data stored in a RowTable -struct ARROW_EXPORT RowTableMetadata { +struct ARROW_COMPUTE_EXPORT RowTableMetadata { using offset_type = int64_t; /// \brief True if there are no variable length columns in the table @@ -170,7 +171,7 @@ struct ARROW_EXPORT RowTableMetadata { /// Can store both fixed-size data types and variable-length data types /// /// The row table is not safe -class ARROW_EXPORT RowTableImpl { +class ARROW_COMPUTE_EXPORT RowTableImpl { public: using offset_type = RowTableMetadata::offset_type; diff --git a/cpp/src/arrow/compute/test_env.cc b/cpp/src/arrow/compute/test_env.cc new file mode 100644 index 00000000000..530ef5fa24d --- /dev/null +++ b/cpp/src/arrow/compute/test_env.cc @@ -0,0 +1,42 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. + +#include + +#include "arrow/compute/initialize.h" +#include "arrow/testing/gtest_util.h" + +namespace arrow::compute { + +namespace { + +class ComputeKernelEnvironment : public ::testing::Environment { + public: + // This must be done before using the compute kernels in order to + // register them to the FunctionRegistry. 
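+  // As a gtest global environment, SetUp() runs once before any test in the +  // binary executes, so every compute test sees the fully populated registry.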
+ ComputeKernelEnvironment() : ::testing::Environment() {} + + void SetUp() override { ASSERT_OK(arrow::compute::Initialize()); } +}; + +} // namespace + +// Initialize the compute module +::testing::Environment* compute_kernels_env = + ::testing::AddGlobalTestEnvironment(new ComputeKernelEnvironment); + +} // namespace arrow::compute diff --git a/cpp/src/arrow/compute/util.h b/cpp/src/arrow/compute/util.h index 1aaff43e10e..ad541e182a4 100644 --- a/cpp/src/arrow/compute/util.h +++ b/cpp/src/arrow/compute/util.h @@ -26,6 +26,7 @@ #include "arrow/compute/expression.h" #include "arrow/compute/type_fwd.h" +#include "arrow/compute/visibility.h" #include "arrow/result.h" #include "arrow/util/cpu_info.h" #include "arrow/util/simd.h" @@ -66,49 +67,54 @@ class MiniBatch { namespace bit_util { -ARROW_EXPORT void bits_to_indexes(int bit_to_search, int64_t hardware_flags, - const int num_bits, const uint8_t* bits, - int* num_indexes, uint16_t* indexes, - int bit_offset = 0); +ARROW_COMPUTE_EXPORT void bits_to_indexes(int bit_to_search, int64_t hardware_flags, + const int num_bits, const uint8_t* bits, + int* num_indexes, uint16_t* indexes, + int bit_offset = 0); -ARROW_EXPORT void bits_filter_indexes(int bit_to_search, int64_t hardware_flags, - const int num_bits, const uint8_t* bits, - const uint16_t* input_indexes, int* num_indexes, - uint16_t* indexes, int bit_offset = 0); +ARROW_COMPUTE_EXPORT void bits_filter_indexes(int bit_to_search, int64_t hardware_flags, + const int num_bits, const uint8_t* bits, + const uint16_t* input_indexes, + int* num_indexes, uint16_t* indexes, + int bit_offset = 0); // Input and output indexes may be pointing to the same data (in-place filtering). -ARROW_EXPORT void bits_split_indexes(int64_t hardware_flags, const int num_bits, - const uint8_t* bits, int* num_indexes_bit0, - uint16_t* indexes_bit0, uint16_t* indexes_bit1, - int bit_offset = 0); +ARROW_COMPUTE_EXPORT void bits_split_indexes(int64_t hardware_flags, const int num_bits, + const uint8_t* bits, int* num_indexes_bit0, + uint16_t* indexes_bit0, + uint16_t* indexes_bit1, int bit_offset = 0); // Bit 1 is replaced with byte 0xFF. -ARROW_EXPORT void bits_to_bytes(int64_t hardware_flags, const int num_bits, - const uint8_t* bits, uint8_t* bytes, int bit_offset = 0); +ARROW_COMPUTE_EXPORT void bits_to_bytes(int64_t hardware_flags, const int num_bits, + const uint8_t* bits, uint8_t* bytes, + int bit_offset = 0); // Return highest bit of each byte. -ARROW_EXPORT void bytes_to_bits(int64_t hardware_flags, const int num_bits, - const uint8_t* bytes, uint8_t* bits, int bit_offset = 0); +ARROW_COMPUTE_EXPORT void bytes_to_bits(int64_t hardware_flags, const int num_bits, + const uint8_t* bytes, uint8_t* bits, + int bit_offset = 0); -ARROW_EXPORT bool are_all_bytes_zero(int64_t hardware_flags, const uint8_t* bytes, - uint32_t num_bytes); +ARROW_COMPUTE_EXPORT bool are_all_bytes_zero(int64_t hardware_flags, const uint8_t* bytes, + uint32_t num_bytes); #if defined(ARROW_HAVE_RUNTIME_AVX2) && defined(ARROW_HAVE_RUNTIME_BMI2) // The functions below use BMI2 instructions, be careful before calling! 
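+// (The generic bit_util entry points above take hardware_flags and decide at +// run time whether to dispatch into this namespace; call the avx2:: variants +// directly only after verifying AVX2 and BMI2 support.)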
namespace avx2 { -ARROW_EXPORT void bits_filter_indexes_avx2(int bit_to_search, const int num_bits, - const uint8_t* bits, - const uint16_t* input_indexes, - int* num_indexes, uint16_t* indexes); -ARROW_EXPORT void bits_to_indexes_avx2(int bit_to_search, const int num_bits, - const uint8_t* bits, int* num_indexes, - uint16_t* indexes, uint16_t base_index = 0); -ARROW_EXPORT void bits_to_bytes_avx2(const int num_bits, const uint8_t* bits, - uint8_t* bytes); -ARROW_EXPORT void bytes_to_bits_avx2(const int num_bits, const uint8_t* bytes, - uint8_t* bits); -ARROW_EXPORT bool are_all_bytes_zero_avx2(const uint8_t* bytes, uint32_t num_bytes); +ARROW_COMPUTE_EXPORT void bits_filter_indexes_avx2(int bit_to_search, const int num_bits, + const uint8_t* bits, + const uint16_t* input_indexes, + int* num_indexes, uint16_t* indexes); +ARROW_COMPUTE_EXPORT void bits_to_indexes_avx2(int bit_to_search, const int num_bits, + const uint8_t* bits, int* num_indexes, + uint16_t* indexes, + uint16_t base_index = 0); +ARROW_COMPUTE_EXPORT void bits_to_bytes_avx2(const int num_bits, const uint8_t* bits, + uint8_t* bytes); +ARROW_COMPUTE_EXPORT void bytes_to_bits_avx2(const int num_bits, const uint8_t* bytes, + uint8_t* bits); +ARROW_COMPUTE_EXPORT bool are_all_bytes_zero_avx2(const uint8_t* bytes, + uint32_t num_bytes); } // namespace avx2 #endif diff --git a/cpp/src/arrow/compute/util_internal.h b/cpp/src/arrow/compute/util_internal.h index 5e5b15a5ff6..301fd4939b4 100644 --- a/cpp/src/arrow/compute/util_internal.h +++ b/cpp/src/arrow/compute/util_internal.h @@ -17,6 +17,7 @@ #pragma once +#include "arrow/compute/visibility.h" #include "arrow/status.h" #include "arrow/type_fwd.h" #include "arrow/util/logging.h" @@ -34,7 +35,7 @@ void CheckAlignment(const void* ptr) { /// Temporary vectors should resemble allocating temporary variables on the stack /// but in the context of vectorized processing where we need to store a vector of /// temporaries instead of a single value. -class ARROW_EXPORT TempVectorStack { +class ARROW_COMPUTE_EXPORT TempVectorStack { template friend class TempVectorHolder; diff --git a/cpp/src/arrow/compute/visibility.h b/cpp/src/arrow/compute/visibility.h new file mode 100644 index 00000000000..ae994bd2333 --- /dev/null +++ b/cpp/src/arrow/compute/visibility.h @@ -0,0 +1,49 @@ +// Licensed to the Apache Software Foundation (ASF) under one +// or more contributor license agreements. See the NOTICE file +// distributed with this work for additional information +// regarding copyright ownership. The ASF licenses this file +// to you under the Apache License, Version 2.0 (the +// "License"); you may not use this file except in compliance +// with the License. You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, +// software distributed under the License is distributed on an +// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +// KIND, either express or implied. See the License for the +// specific language governing permissions and limitations +// under the License. 
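+// Export-macro scheme for the standalone libarrow_compute: +// ARROW_COMPUTE_EXPORTING is defined while building the shared library, so +// ARROW_COMPUTE_EXPORT becomes __declspec(dllexport) on Windows; consumers of +// the DLL see __declspec(dllimport) instead; ARROW_COMPUTE_STATIC (Windows +// static builds) leaves symbols undecorated; elsewhere, default/hidden ELF +// visibility attributes are used.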
+ +#pragma once + +#if defined(_WIN32) || defined(__CYGWIN__) +# if defined(_MSC_VER) +# pragma warning(push) +# pragma warning(disable : 4251) +# else +# pragma GCC diagnostic ignored "-Wattributes" +# endif + +# ifdef ARROW_COMPUTE_STATIC +# define ARROW_COMPUTE_EXPORT +# elif defined(ARROW_COMPUTE_EXPORTING) +# define ARROW_COMPUTE_EXPORT __declspec(dllexport) +# else +# define ARROW_COMPUTE_EXPORT __declspec(dllimport) +# endif + +# define ARROW_COMPUTE_NO_EXPORT + +# if defined(_MSC_VER) +# pragma warning(pop) +# endif + +#else // Not Windows +# ifndef ARROW_COMPUTE_EXPORT +# define ARROW_COMPUTE_EXPORT __attribute__((visibility("default"))) +# endif +# ifndef ARROW_COMPUTE_NO_EXPORT +# define ARROW_COMPUTE_NO_EXPORT __attribute__((visibility("hidden"))) +# endif +#endif diff --git a/cpp/src/arrow/dataset/CMakeLists.txt b/cpp/src/arrow/dataset/CMakeLists.txt index 34e26a4cb90..809bdfaae6c 100644 --- a/cpp/src/arrow/dataset/CMakeLists.txt +++ b/cpp/src/arrow/dataset/CMakeLists.txt @@ -40,8 +40,8 @@ set(ARROW_DATASET_SRCS scanner.cc scan_node.cc) -set(ARROW_DATASET_PKG_CONFIG_REQUIRES "arrow-acero") -set(ARROW_DATASET_REQUIRED_DEPENDENCIES Arrow ArrowAcero) +set(ARROW_DATASET_PKG_CONFIG_REQUIRES "arrow-acero arrow-compute") +set(ARROW_DATASET_REQUIRED_DEPENDENCIES Arrow ArrowCompute ArrowAcero) if(ARROW_PARQUET) string(APPEND ARROW_DATASET_PKG_CONFIG_REQUIRES " parquet") list(APPEND ARROW_DATASET_REQUIRED_DEPENDENCIES Parquet) diff --git a/cpp/src/arrow/engine/CMakeLists.txt b/cpp/src/arrow/engine/CMakeLists.txt index 94bee50089a..adf98087ad1 100644 --- a/cpp/src/arrow/engine/CMakeLists.txt +++ b/cpp/src/arrow/engine/CMakeLists.txt @@ -87,6 +87,7 @@ add_arrow_test(substrait_test substrait/test_util.cc EXTRA_LINK_LIBS ${ARROW_SUBSTRAIT_TEST_LINK_LIBS} + arrow_compute_testing PREFIX "arrow-substrait" LABELS diff --git a/cpp/src/arrow/flight/sql/CMakeLists.txt b/cpp/src/arrow/flight/sql/CMakeLists.txt index 6f34e6e3798..958fea40acf 100644 --- a/cpp/src/arrow/flight/sql/CMakeLists.txt +++ b/cpp/src/arrow/flight/sql/CMakeLists.txt @@ -119,6 +119,7 @@ if(ARROW_BUILD_TESTS OR ARROW_BUILD_EXAMPLES) set(ARROW_FLIGHT_SQL_TEST_LIBS ${SQLite3_LIBRARIES}) set(ARROW_FLIGHT_SQL_ACERO_SRCS example/acero_server.cc) + set(ARROW_FLIGHT_SQL_EXTRA_LINK_LIBS "") if(ARROW_COMPUTE AND ARROW_PARQUET @@ -129,6 +130,7 @@ if(ARROW_BUILD_TESTS OR ARROW_BUILD_EXAMPLES) else() list(APPEND ARROW_FLIGHT_SQL_TEST_LIBS arrow_substrait_shared) endif() + list(APPEND ARROW_FLIGHT_SQL_EXTRA_LINK_LIBS arrow_compute_testing) if(ARROW_BUILD_EXAMPLES) add_executable(acero-flight-sql-server ${ARROW_FLIGHT_SQL_ACERO_SRCS} @@ -146,6 +148,8 @@ if(ARROW_BUILD_TESTS OR ARROW_BUILD_EXAMPLES) STATIC_LINK_LIBS ${ARROW_FLIGHT_SQL_TEST_LINK_LIBS} ${ARROW_FLIGHT_SQL_TEST_LIBS} + EXTRA_LINK_LIBS + ${ARROW_FLIGHT_SQL_EXTRA_LINK_LIBS} EXTRA_INCLUDES "${CMAKE_CURRENT_BINARY_DIR}/../" LABELS diff --git a/cpp/src/arrow/type.h b/cpp/src/arrow/type.h index de86d2845b9..f68d2dcb619 100644 --- a/cpp/src/arrow/type.h +++ b/cpp/src/arrow/type.h @@ -292,6 +292,7 @@ std::ostream& operator<<(std::ostream& os, const TypeHolder& type); /// - if a `PhysicalType` alias exists in the concrete type class, return /// an instance of `PhysicalType`. /// - otherwise, return the input type itself. 
+ARROW_EXPORT std::shared_ptr<DataType> GetPhysicalType(const std::shared_ptr<DataType>& type); /// \brief Base class for all fixed-width data types diff --git a/dev/tasks/linux-packages/apache-arrow/debian/control.in b/dev/tasks/linux-packages/apache-arrow/debian/control.in index 21ffcf8d5ed..3dc67b066bb 100644 --- a/dev/tasks/linux-packages/apache-arrow/debian/control.in +++ b/dev/tasks/linux-packages/apache-arrow/debian/control.in @@ -67,6 +67,19 @@ Description: Apache Arrow is a data processing library for analysis . This package provides tools. +Package: libarrow-compute2100 +Section: libs +Architecture: any +Multi-Arch: same +Pre-Depends: ${misc:Pre-Depends} +Depends: + ${misc:Depends}, + ${shlibs:Depends}, + libarrow2100 (= ${binary:Version}) +Description: Apache Arrow is a data processing library for analysis + . + This package provides C++ library files for Compute support. + Package: libarrow-cuda2100 Section: libs Architecture: @CUDA_ARCHITECTURE@ @@ -88,7 +101,7 @@ Pre-Depends: ${misc:Pre-Depends} Depends: ${misc:Depends}, ${shlibs:Depends}, - libarrow2100 (= ${binary:Version}) + libarrow-compute2100 (= ${binary:Version}) Description: Apache Arrow is a data processing library for analysis . This package provides C++ library files for Acero module. @@ -161,6 +174,17 @@ Description: Apache Arrow is a data processing library for analysis . This package provides C++ header files. +Package: libarrow-compute-dev +Section: libdevel +Architecture: any +Multi-Arch: same +Depends: + ${misc:Depends}, + libarrow-compute2100 (= ${binary:Version}) +Description: Apache Arrow is a data processing library for analysis + . + This package provides C++ header files for compute module. + Package: libarrow-cuda-dev Section: libdevel Architecture: @CUDA_ARCHITECTURE@ @@ -320,6 +344,7 @@ Multi-Arch: same Depends: ${misc:Depends}, libglib2.0-dev, + libarrow-compute-dev (= ${binary:Version}), libarrow-acero-dev (= ${binary:Version}), libarrow-glib2100 (= ${binary:Version}), gir1.2-arrow-1.0 (= ${binary:Version}) diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute-dev.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute-dev.install new file mode 100644 index 00000000000..44b63512be2 --- /dev/null +++ b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute-dev.install @@ -0,0 +1,4 @@ +usr/lib/*/cmake/ArrowCompute/ +usr/lib/*/libarrow_compute.a +usr/lib/*/libarrow_compute.so +usr/lib/*/pkgconfig/arrow-compute.pc diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute2100.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute2100.install new file mode 100644 index 00000000000..f014d075f75 --- /dev/null +++ b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-compute2100.install @@ -0,0 +1 @@ +usr/lib/*/libarrow_compute.so.* diff --git a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dev.install b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dev.install index 9df014c54ca..802095804ab 100644 --- a/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dev.install +++ b/dev/tasks/linux-packages/apache-arrow/debian/libarrow-dev.install @@ -3,7 +3,6 @@ usr/lib/*/cmake/Arrow/ usr/lib/*/libarrow.a usr/lib/*/libarrow.so usr/lib/*/libarrow_bundled_dependencies.a -usr/lib/*/pkgconfig/arrow-compute.pc usr/lib/*/pkgconfig/arrow-csv.pc usr/lib/*/pkgconfig/arrow-filesystem.pc usr/lib/*/pkgconfig/arrow-json.pc diff --git a/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in b/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in
index 8557071ee6c..47e8230a071 100644 --- a/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in +++ b/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in @@ -320,6 +320,8 @@ Libraries and header files for Apache Arrow C++. %{_datadir}/gdb/auto-load/ %{_includedir}/arrow/ %exclude %{_includedir}/arrow/acero/ +%exclude %{_includedir}/arrow/compute/kernels +%exclude %{_includedir}/arrow/compute/row %exclude %{_includedir}/arrow/dataset/ %if %{use_flight} %exclude %{_includedir}/arrow/flight/ @@ -328,17 +330,50 @@ Libraries and header files for Apache Arrow C++. %{_libdir}/libarrow.a %{_libdir}/libarrow.so %{_libdir}/libarrow_bundled_dependencies.a -%{_libdir}/pkgconfig/arrow-compute.pc %{_libdir}/pkgconfig/arrow-csv.pc %{_libdir}/pkgconfig/arrow-filesystem.pc %{_libdir}/pkgconfig/arrow-json.pc %{_libdir}/pkgconfig/arrow-orc.pc %{_libdir}/pkgconfig/arrow.pc +%package -n %{name}%{so_version}-compute-libs +Summary: C++ library for extra compute functions +License: Apache-2.0 +Requires: %{name}%{so_version}-libs = %{version}-%{release} + +%description -n %{name}%{so_version}-compute-libs +This package contains the libraries for Apache Arrow Compute. + +%files -n %{name}%{so_version}-compute-libs +%defattr(-,root,root,-) +%doc README.md +%license LICENSE.txt NOTICE.txt +%{_libdir}/libarrow_compute.so.* + +%package compute-devel +Summary: Libraries and header files for Apache Arrow Compute +License: Apache-2.0 +Requires: %{name}%{so_version}-compute-libs = %{version}-%{release} +Requires: %{name}-devel = %{version}-%{release} + +%description compute-devel +Libraries and header files for Apache Arrow Compute + +%files compute-devel +%defattr(-,root,root,-) +%doc README.md +%license LICENSE.txt NOTICE.txt +%{_includedir}/arrow/compute/kernels +%{_includedir}/arrow/compute/row +%{_libdir}/cmake/ArrowCompute/ +%{_libdir}/libarrow_compute.a +%{_libdir}/libarrow_compute.so +%{_libdir}/pkgconfig/arrow-compute.pc + %package -n %{name}%{so_version}-acero-libs Summary: C++ library to execute a query in streaming License: Apache-2.0 -Requires: %{name}%{so_version}-libs = %{version}-%{release} +Requires: %{name}%{so_version}-compute-libs = %{version}-%{release} %description -n %{name}%{so_version}-acero-libs This package contains the libraries for Apache Arrow Acero. 
diff --git a/dev/tasks/tasks.yml b/dev/tasks/tasks.yml index 04d67abfeee..352084a34c4 100644 --- a/dev/tasks/tasks.yml +++ b/dev/tasks/tasks.yml @@ -338,6 +338,9 @@ tasks: - libarrow-acero-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-acero{so_version}-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb - libarrow-acero{so_version}_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-compute-dev_{no_rc_version}-1_[a-z0-9]+.deb + - libarrow-compute{so_version}-dbgsym_{no_rc_version}-1_[a-z0-9]+.d?deb + - libarrow-compute{so_version}_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-dataset-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-dataset-glib-dev_{no_rc_version}-1_[a-z0-9]+.deb - libarrow-dataset-glib-doc_{no_rc_version}-1_[a-z0-9]+.deb @@ -425,6 +428,11 @@ tasks: - arrow-acero-devel-{no_rc_version}-1.[a-z0-9]+.[a-z0-9_]+.rpm {% if not is_rhel7_based %} - arrow[0-9]+-acero-libs-debuginfo-{no_rc_version}-1.[a-z0-9]+.[a-z0-9_]+.rpm + {% endif %} + - arrow[0-9]+-compute-libs-{no_rc_version}-1.[a-z0-9]+.[a-z0-9_]+.rpm + - arrow-compute-devel-{no_rc_version}-1.[a-z0-9]+.[a-z0-9_]+.rpm + {% if not is_rhel7_based %} + - arrow[0-9]+-compute-libs-debuginfo-{no_rc_version}-1.[a-z0-9]+.[a-z0-9_]+.rpm {% endif %} - arrow[0-9]+-dataset-libs-{no_rc_version}-1.[a-z0-9]+.[a-z0-9_]+.rpm - arrow-dataset-devel-{no_rc_version}-1.[a-z0-9]+.[a-z0-9_]+.rpm diff --git a/docs/source/cpp/compute.rst b/docs/source/cpp/compute.rst index 585845d8306..b25ece967c1 100644 --- a/docs/source/cpp/compute.rst +++ b/docs/source/cpp/compute.rst @@ -28,7 +28,8 @@ Compute Functions The generic Compute API ======================= -.. TODO: describe API and how to invoke compute functions +.. seealso:: + :doc:`Compute Functions API reference <api/compute>` Functions and function registry ------------------------------- @@ -42,6 +43,17 @@ whether the inputs are integral or floating-point). Functions are stored in a global :class:`FunctionRegistry` where they can be looked up by name. +Compute Initialization +---------------------- + +The compute library requires a call to :func:`arrow::compute::Initialize` +in order to register the individual functions into the global :class:`FunctionRegistry`; +otherwise, only the functions required for Arrow core functionality will be available. + +.. note:: + The set of functions required for Arrow core functionality is an implementation detail + of the library, and should not be considered stable.
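+A minimal usage sketch (illustrative only; error handling is elided and the
+``arrow::Datum`` inputs ``a`` and ``b`` are assumed to have been prepared by
+the caller):
+
+.. code-block:: cpp
+
+   // Register the full kernel set once, early in program startup.
+   ARROW_RETURN_NOT_OK(arrow::compute::Initialize());
+   // Registered functions can then be looked up and called by name.
+   ARROW_ASSIGN_OR_RAISE(arrow::Datum sum,
+                         arrow::compute::CallFunction("add", {a, b}));
+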
+ Input shapes ------------ diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index 048f01ab9f0..4138d2b282f 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -426,6 +426,19 @@ if(PYARROW_BUILD_ACERO) endif() endif() +# Currently PyArrow cannot be built without ARROW_COMPUTE +if(NOT ARROW_COMPUTE) + message(FATAL_ERROR "You must build Arrow C++ with ARROW_COMPUTE=ON") +else() + message(STATUS "Building PyArrow with Compute") + find_package(ArrowCompute REQUIRED) + if(ARROW_BUILD_SHARED) + list(APPEND PYARROW_CPP_LINK_LIBS ArrowCompute::arrow_compute_shared) + else() + list(APPEND PYARROW_CPP_LINK_LIBS ArrowCompute::arrow_compute_static) + endif() +endif() + if(PYARROW_BUILD_PARQUET) message(STATUS "Building PyArrow with Parquet") if(NOT ARROW_PARQUET) @@ -643,12 +656,13 @@ get_filename_component(ARROW_INCLUDE_ARROW_DIR_REAL ${ARROW_INCLUDE_DIR}/arrow R install(DIRECTORY ${ARROW_INCLUDE_ARROW_DIR_REAL} DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}) if(PYARROW_BUNDLE_ARROW_CPP) - # Arrow + # Arrow and Compute bundle_arrow_lib(${ARROW_SHARED_LIB} SO_VERSION ${ARROW_SO_VERSION}) + bundle_arrow_lib(${ARROW_COMPUTE_SHARED_LIB} SO_VERSION ${ARROW_SO_VERSION}) if(MSVC) - # TODO(kszucs): locate msvcp140.dll in a portable fashion and bundle it bundle_arrow_import_lib(${ARROW_IMPORT_LIB}) + bundle_arrow_import_lib(${ARROW_COMPUTE_IMPORT_LIB}) endif() endif() diff --git a/python/pyarrow/_compute.pyx b/python/pyarrow/_compute.pyx index bf01f94a3e1..2ff126b8b39 100644 --- a/python/pyarrow/_compute.pyx +++ b/python/pyarrow/_compute.pyx @@ -40,6 +40,10 @@ except ImportError: import warnings +# Call to initialize the compute module (register kernels) on import +check_status(InitializeCompute()) + + __pas = None _substrait_msg = ( "The pyarrow installation is not built with support for Substrait." 
diff --git a/python/pyarrow/includes/libarrow.pxd b/python/pyarrow/includes/libarrow.pxd index 8c45494f94c..3759de8dc18 100644 --- a/python/pyarrow/includes/libarrow.pxd +++ b/python/pyarrow/includes/libarrow.pxd @@ -2229,6 +2229,8 @@ cdef extern from "arrow/util/thread_pool.h" namespace "arrow::internal" nogil: cdef extern from "arrow/compute/api.h" namespace "arrow::compute" nogil: + CStatus InitializeCompute " arrow::compute::Initialize"() + cdef cppclass CExecBatch "arrow::compute::ExecBatch": vector[CDatum] values int64_t length diff --git a/r/R/arrowExports.R b/r/R/arrowExports.R index 901898e5b29..4ed612fc734 100644 --- a/r/R/arrowExports.R +++ b/r/R/arrowExports.R @@ -536,6 +536,10 @@ compute__GetFunctionNames <- function() { .Call(`_arrow_compute__GetFunctionNames`) } +compute__Initialize <- function() { + invisible(.Call(`_arrow_compute__Initialize`)) +} + RegisterScalarUDF <- function(name, func_sexp) { invisible(.Call(`_arrow_RegisterScalarUDF`, name, func_sexp)) } diff --git a/r/configure b/r/configure index e1f0bad3787..5ecd4a761b4 100755 --- a/r/configure +++ b/r/configure @@ -333,6 +333,13 @@ add_feature_flags () { # NOTE: parquet is assumed to have the same -L flag as arrow # so there is no need to add its location to PKG_DIRS fi + if arrow_built_with ARROW_COMPUTE; then + PKG_CFLAGS_FEATURES="$PKG_CFLAGS_FEATURES -DARROW_R_WITH_COMPUTE" + PKG_CONFIG_NAMES_FEATURES="$PKG_CONFIG_NAMES_FEATURES arrow-compute" + PKG_LIBS_FEATURES_WITHOUT_PC="-larrow_compute $PKG_LIBS_FEATURES_WITHOUT_PC" + # NOTE: arrow_compute is assumed to have the same -L flag as arrow + # so there is no need to add its location to PKG_DIRS + fi if arrow_built_with ARROW_DATASET; then PKG_CFLAGS_FEATURES="$PKG_CFLAGS_FEATURES -DARROW_R_WITH_DATASET" PKG_CONFIG_NAMES_FEATURES="$PKG_CONFIG_NAMES_FEATURES arrow-dataset" diff --git a/r/configure.win b/r/configure.win index e0682917e9b..ae175f5622b 100755 --- a/r/configure.win +++ b/r/configure.win @@ -81,12 +81,13 @@ function configure_binaries() { # NOTE: If you make changes to the libraries below, you should also change # ci/scripts/r_windows_build.sh and ci/scripts/PKGBUILD - PKG_CFLAGS="-I${RWINLIB}/include -DARROW_STATIC -DPARQUET_STATIC -DARROW_DS_STATIC \ - -DARROW_ACERO_STATIC -DARROW_R_WITH_PARQUET -DARROW_R_WITH_ACERO \ + PKG_CFLAGS="-I${RWINLIB}/include -DARROW_STATIC -DARROW_COMPUTE_STATIC -DPARQUET_STATIC \ + -DARROW_DS_STATIC -DARROW_ACERO_STATIC -DARROW_R_WITH_PARQUET \ + -DARROW_R_WITH_COMPUTE -DARROW_R_WITH_ACERO \ -DARROW_R_WITH_DATASET -DARROW_R_WITH_JSON" PKG_LIBS="-L${RWINLIB}/lib"'$(subst gcc,,$(COMPILED_BY))$(R_ARCH) ' PKG_LIBS="$PKG_LIBS -L${RWINLIB}/lib"'$(R_ARCH)$(CRT) ' - PKG_LIBS="$PKG_LIBS -larrow_dataset -larrow_acero -lparquet -larrow -larrow_bundled_dependencies \ + PKG_LIBS="$PKG_LIBS -larrow_dataset -larrow_acero -lparquet -larrow_compute -larrow -larrow_bundled_dependencies \ -lutf8proc -lsnappy -lz -lzstd -llz4 -lbz2 ${BROTLI_LIBS} -lole32 \ ${MIMALLOC_LIBS} ${OPENSSL_LIBS}" @@ -160,6 +161,13 @@ add_feature_flags () { # NOTE: parquet is assumed to have the same -L flag as arrow # so there is no need to add its location to PKG_DIRS fi + if arrow_built_with ARROW_COMPUTE; then + PKG_CFLAGS_FEATURES="$PKG_CFLAGS_FEATURES -DARROW_R_WITH_COMPUTE" + PKG_CONFIG_NAMES_FEATURES="$PKG_CONFIG_NAMES_FEATURES arrow-compute" + PKG_LIBS_FEATURES_WITHOUT_PC="-larrow_compute $PKG_LIBS_FEATURES_WITHOUT_PC" + # NOTE: arrow_compute is assumed to have the same -L flag as arrow + # so there is no need to add its location to PKG_DIRS + fi if 
arrow_built_with ARROW_DATASET; then PKG_CFLAGS_FEATURES="$PKG_CFLAGS_FEATURES -DARROW_R_WITH_DATASET" PKG_CONFIG_NAMES_FEATURES="$PKG_CONFIG_NAMES_FEATURES arrow-dataset" @@ -269,6 +277,11 @@ function configure_dev() { PKG_CONFIG_PACKAGES="$PKG_CONFIG_PACKAGES parquet" fi + if [ $(cmake_option ARROW_COMPUTE) -eq 1 ]; then + PKG_CFLAGS="$PKG_CFLAGS -DARROW_R_WITH_COMPUTE" + PKG_CONFIG_PACKAGES="$PKG_CONFIG_PACKAGES arrow-compute" + fi + if [ $(cmake_option ARROW_ACERO) -eq 1 ]; then PKG_CFLAGS="$PKG_CFLAGS -DARROW_R_WITH_ACERO" PKG_CONFIG_PACKAGES="$PKG_CONFIG_PACKAGES arrow-acero" diff --git a/r/data-raw/codegen.R b/r/data-raw/codegen.R index 4f027a3d9dd..d211dc412c8 100644 --- a/r/data-raw/codegen.R +++ b/r/data-raw/codegen.R @@ -191,6 +191,7 @@ static const R_CallMethodDef CallEntries[] = { arrow::r::altrep::Init_Altrep_classes(dll); #endif + _arrow_compute__Initialize(); } \n' ) diff --git a/r/src/arrowExports.cpp b/r/src/arrowExports.cpp index e75d38a303f..c71d1c77305 100644 --- a/r/src/arrowExports.cpp +++ b/r/src/arrowExports.cpp @@ -1350,6 +1350,14 @@ BEGIN_CPP11 END_CPP11 } // compute.cpp +void compute__Initialize(); +extern "C" SEXP _arrow_compute__Initialize(){ +BEGIN_CPP11 + compute__Initialize(); + return R_NilValue; +END_CPP11 +} +// compute.cpp void RegisterScalarUDF(std::string name, cpp11::list func_sexp); extern "C" SEXP _arrow_RegisterScalarUDF(SEXP name_sexp, SEXP func_sexp_sexp){ BEGIN_CPP11 @@ -5805,6 +5813,7 @@ static const R_CallMethodDef CallEntries[] = { { "_arrow_Table__cast", (DL_FUNC) &_arrow_Table__cast, 3}, { "_arrow_compute__CallFunction", (DL_FUNC) &_arrow_compute__CallFunction, 3}, { "_arrow_compute__GetFunctionNames", (DL_FUNC) &_arrow_compute__GetFunctionNames, 0}, + { "_arrow_compute__Initialize", (DL_FUNC) &_arrow_compute__Initialize, 0}, { "_arrow_RegisterScalarUDF", (DL_FUNC) &_arrow_RegisterScalarUDF, 2}, { "_arrow_build_info", (DL_FUNC) &_arrow_build_info, 0}, { "_arrow_runtime_info", (DL_FUNC) &_arrow_runtime_info, 0}, @@ -6223,6 +6232,7 @@ extern "C" void R_init_arrow(DllInfo* dll){ arrow::r::altrep::Init_Altrep_classes(dll); #endif + _arrow_compute__Initialize(); } diff --git a/r/src/compute.cpp b/r/src/compute.cpp index bd97e30005c..0777ca8bc72 100644 --- a/r/src/compute.cpp +++ b/r/src/compute.cpp @@ -621,6 +621,12 @@ std::vector compute__GetFunctionNames() { return arrow::compute::GetFunctionRegistry()->GetFunctionNames(); } +// [[arrow::export]] +void compute__Initialize() { + auto status = arrow::compute::Initialize(); + StopIfNotOk(status); +} + class RScalarUDFKernelState : public arrow::compute::KernelState { public: RScalarUDFKernelState(cpp11::sexp exec_func, cpp11::sexp resolver) diff --git a/ruby/red-arrow/lib/arrow/loader.rb b/ruby/red-arrow/lib/arrow/loader.rb index b56350ddac2..89a219bac32 100644 --- a/ruby/red-arrow/lib/arrow/loader.rb +++ b/ruby/red-arrow/lib/arrow/loader.rb @@ -31,6 +31,7 @@ def post_load(repository, namespace) require_extension_library gc_guard self.class.start_callback_dispatch_thread + @base_module.compute_initialize end def require_libraries From 2f499cf11a029949be9bcde427877d1ca21e764d Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Ra=C3=BAl=20Cumplido?= Date: Fri, 13 Jun 2025 11:22:13 +0200 Subject: [PATCH 50/63] GH-46801: [Dev] Remove some leftovers for Java, Go, JS and Swift on some config files (#46802) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Rationale for this change The config for Java, Go, JS and Swift on those files is not relevant anymore. 
### What changes are included in this PR?

Remove references to those implementations from the labeler, CODEOWNERS and editorconfig files.

### Are these changes tested?

No

### Are there any user-facing changes?

No
* GitHub Issue: #46801

Authored-by: Raúl Cumplido
Signed-off-by: Sutou Kouhei
---
 .editorconfig                        |  9 ---------
 .github/CODEOWNERS                   |  4 ----
 .github/workflows/dev_pr/labeler.yml | 15 ---------------
 3 files changed, 28 deletions(-)

diff --git a/.editorconfig b/.editorconfig
index 999f94bae00..67b30b62ad4 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -50,15 +50,6 @@ indent_style = space
 indent_size = 2
 indent_style = space
 
-[*.go]
-indent_size = 8
-indent_style = tab
-tab_width = 8
-
-[*.{js,ts}]
-indent_size = 4
-indent_style = space
-
 [*.{py,pyx,pxd,pxi}]
 indent_size = 4
 indent_style = space
diff --git a/.github/CODEOWNERS b/.github/CODEOWNERS
index 6084671f1c2..79e658f52f4 100644
--- a/.github/CODEOWNERS
+++ b/.github/CODEOWNERS
@@ -34,16 +34,12 @@
 /cpp/src/arrow/flight/ @lidavidm
 /cpp/src/parquet @wgtmac
 /csharp/ @curthagenlocher
-/go/ @zeroshade
-/java/ @lidavidm
-/js/ @domoritz @trxcllnt
 /matlab/ @kevingurney @kou @sgilmore10
 /python/ @AlenkaF @raulcd @rok
 /python/pyarrow/_flight.pyx @lidavidm
 /python/pyarrow/**/*gandiva* @wjones127
 /r/ @jonkeane @thisisnic
 /ruby/ @kou
-/swift/ @kou
 
 # Docs
 # /docs/
diff --git a/.github/workflows/dev_pr/labeler.yml b/.github/workflows/dev_pr/labeler.yml
index 7ef92f0be9b..1c50b24fd6b 100644
--- a/.github/workflows/dev_pr/labeler.yml
+++ b/.github/workflows/dev_pr/labeler.yml
@@ -30,16 +30,6 @@
   - any-glob-to-any-file:
     - csharp/**/*
 
-"Component: Go":
-- changed-files:
-  - any-glob-to-any-file:
-    - go/**/*
-
-"Component: JavaScript":
-- changed-files:
-  - any-glob-to-any-file:
-    - js/**/*
-
 "Component: MATLAB":
 - changed-files:
   - any-glob-to-any-file:
@@ -60,11 +50,6 @@
   - any-glob-to-any-file:
     - ruby/**/*
 
-"Component: Swift":
-- changed-files:
-  - any-glob-to-any-file:
-    - swift/**/*
-
 "Component: FlightRPC":
 - changed-files:
   - any-glob-to-any-file:
From a77bfec0b30cc8a716cfe9a28003d3980dfc42f0 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Ra=C3=BAl=20Cumplido?=
Date: Fri, 13 Jun 2025 11:22:51 +0200
Subject: [PATCH 51/63] GH-46803: [Swift] Remove swift implementation from apache/arrow after migration to new repository (#46804)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

### Rationale for this change

The Swift implementation has been moved to its own repository at https://github.com/apache/arrow-swift

### What changes are included in this PR?

Remove the `swift/` subfolder and the GitHub workflow for its CI job.

### Are these changes tested?

No

### Are there any user-facing changes?

No, but for developers of the Swift implementation: development has moved to its own repository.
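For readers who want a feel for the library that is migrating, here is a minimal sketch of the builder API, based only on the `swift/Arrow` sources deleted below (the `import Arrow` module name and SwiftPM wiring are assumptions taken from the package manifest; nothing here is new API):

```swift
import Arrow  // assumes the SwiftPM package now hosted at https://github.com/apache/arrow-swift

func buildExampleColumns() throws {
    // Builders are obtained through ArrowArrayBuilders; Int32 is an arbitrary example type.
    let intBuilder: NumberArrayBuilder<Int32> = try ArrowArrayBuilders.loadNumberArrayBuilder()
    intBuilder.append(1, 2, nil)  // appending nil records a null and bumps nullCount

    let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
    stringBuilder.append("one", "two", nil)

    // finish() snapshots the accumulated buffers into immutable ArrowArray values.
    let ints = try intBuilder.finish()
    let strings = try stringBuilder.finish()
    print(ints.length, ints.nullCount)  // 3 1
    print(strings.asString(0))          // one
}
```

Since the sketch only restates what the removed sources define, existing users should only need to repoint their SwiftPM dependency at the new repository.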
* GitHub Issue: #46803 Authored-by: Raúl Cumplido Signed-off-by: Sutou Kouhei --- .github/workflows/swift.yml | 89 - swift/.editorconfig | 28 - swift/.swiftlint.yml | 38 - swift/Arrow/.gitignore | 9 - swift/Arrow/Package.swift | 67 - swift/Arrow/README.md | 56 - swift/Arrow/Sources/Arrow/ArrowArray.swift | 327 -- .../Sources/Arrow/ArrowArrayBuilder.swift | 341 -- swift/Arrow/Sources/Arrow/ArrowBuffer.swift | 88 - .../Sources/Arrow/ArrowBufferBuilder.swift | 381 -- .../Arrow/Sources/Arrow/ArrowCExporter.swift | 141 - .../Arrow/Sources/Arrow/ArrowCImporter.swift | 180 - swift/Arrow/Sources/Arrow/ArrowData.swift | 67 - swift/Arrow/Sources/Arrow/ArrowDecoder.swift | 376 -- swift/Arrow/Sources/Arrow/ArrowEncoder.swift | 456 -- swift/Arrow/Sources/Arrow/ArrowReader.swift | 419 -- .../Sources/Arrow/ArrowReaderHelper.swift | 298 - swift/Arrow/Sources/Arrow/ArrowSchema.swift | 73 - swift/Arrow/Sources/Arrow/ArrowTable.swift | 202 - swift/Arrow/Sources/Arrow/ArrowType.swift | 405 -- swift/Arrow/Sources/Arrow/ArrowWriter.swift | 434 -- .../Sources/Arrow/ArrowWriterHelper.swift | 135 - swift/Arrow/Sources/Arrow/BitUtility.swift | 40 - swift/Arrow/Sources/Arrow/ChunkedArray.swift | 149 - .../Arrow/Sources/Arrow/File_generated.swift | 160 - .../Arrow/Sources/Arrow/MemoryAllocator.swift | 31 - .../Sources/Arrow/Message_generated.swift | 421 -- swift/Arrow/Sources/Arrow/ProtoUtil.swift | 80 - .../Sources/Arrow/Schema_generated.swift | 1479 ----- .../Arrow/SparseTensor_generated.swift | 535 -- .../Sources/Arrow/Tensor_generated.swift | 193 - swift/Arrow/Sources/ArrowC/ArrowCData.c | 31 - .../Arrow/Sources/ArrowC/include/ArrowCData.h | 78 - .../Tests/ArrowTests/ArrayBuilderTest.swift | 85 - swift/Arrow/Tests/ArrowTests/ArrayTests.swift | 371 -- swift/Arrow/Tests/ArrowTests/CDataTests.swift | 125 - .../Arrow/Tests/ArrowTests/CodableTests.swift | 334 -- swift/Arrow/Tests/ArrowTests/IPCTests.swift | 616 -- .../Tests/ArrowTests/RecordBatchTests.swift | 58 - swift/Arrow/Tests/ArrowTests/TableTests.swift | 211 - swift/ArrowFlight/.gitignore | 9 - swift/ArrowFlight/Package.swift | 61 - .../Sources/ArrowFlight/Flight.grpc.swift | 1343 ----- .../Sources/ArrowFlight/Flight.pb.swift | 1366 ----- .../Sources/ArrowFlight/FlightAction.swift | 39 - .../ArrowFlight/FlightActionType.swift | 38 - .../Sources/ArrowFlight/FlightClient.swift | 187 - .../Sources/ArrowFlight/FlightCriteria.swift | 37 - .../Sources/ArrowFlight/FlightData.swift | 49 - .../ArrowFlight/FlightDescriptor.swift | 56 - .../Sources/ArrowFlight/FlightEndpoint.swift | 38 - .../Sources/ArrowFlight/FlightInfo.swift | 57 - .../Sources/ArrowFlight/FlightLocation.swift | 36 - .../Sources/ArrowFlight/FlightPutResult.swift | 35 - .../Sources/ArrowFlight/FlightResult.swift | 35 - .../ArrowFlight/FlightSchemaResult.swift | 41 - .../Sources/ArrowFlight/FlightServer.swift | 194 - .../Sources/ArrowFlight/FlightSql.pb.swift | 5145 ----------------- .../Sources/ArrowFlight/FlightTicket.swift | 35 - .../ArrowFlight/RecordBatchStreamReader.swift | 81 - .../ArrowFlight/RecordBatchStreamWriter.swift | 96 - .../Tests/ArrowFlightTests/FlightTest.swift | 357 -- swift/CDataWGo/.gitignore | 8 - swift/CDataWGo/Package.swift | 43 - .../CDataWGo/Sources/go-swift/CDataTest.swift | 132 - swift/CDataWGo/go.mod | 38 - swift/CDataWGo/go.sum | 54 - swift/CDataWGo/include/go_swift.h | 30 - swift/CDataWGo/main.go | 127 - swift/data-generator/swift-datagen/go.mod | 38 - swift/data-generator/swift-datagen/go.sum | 54 - swift/data-generator/swift-datagen/main.go | 116 - 
swift/gen-flatbuffers.sh | 51 - swift/gen-protobuffers.sh | 44 - 74 files changed, 19677 deletions(-) delete mode 100644 .github/workflows/swift.yml delete mode 100644 swift/.editorconfig delete mode 100644 swift/.swiftlint.yml delete mode 100644 swift/Arrow/.gitignore delete mode 100644 swift/Arrow/Package.swift delete mode 100644 swift/Arrow/README.md delete mode 100644 swift/Arrow/Sources/Arrow/ArrowArray.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowArrayBuilder.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowBuffer.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowBufferBuilder.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowCExporter.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowCImporter.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowData.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowDecoder.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowEncoder.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowReader.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowReaderHelper.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowSchema.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowTable.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowType.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowWriter.swift delete mode 100644 swift/Arrow/Sources/Arrow/ArrowWriterHelper.swift delete mode 100644 swift/Arrow/Sources/Arrow/BitUtility.swift delete mode 100644 swift/Arrow/Sources/Arrow/ChunkedArray.swift delete mode 100644 swift/Arrow/Sources/Arrow/File_generated.swift delete mode 100644 swift/Arrow/Sources/Arrow/MemoryAllocator.swift delete mode 100644 swift/Arrow/Sources/Arrow/Message_generated.swift delete mode 100644 swift/Arrow/Sources/Arrow/ProtoUtil.swift delete mode 100644 swift/Arrow/Sources/Arrow/Schema_generated.swift delete mode 100644 swift/Arrow/Sources/Arrow/SparseTensor_generated.swift delete mode 100644 swift/Arrow/Sources/Arrow/Tensor_generated.swift delete mode 100644 swift/Arrow/Sources/ArrowC/ArrowCData.c delete mode 100644 swift/Arrow/Sources/ArrowC/include/ArrowCData.h delete mode 100644 swift/Arrow/Tests/ArrowTests/ArrayBuilderTest.swift delete mode 100644 swift/Arrow/Tests/ArrowTests/ArrayTests.swift delete mode 100644 swift/Arrow/Tests/ArrowTests/CDataTests.swift delete mode 100644 swift/Arrow/Tests/ArrowTests/CodableTests.swift delete mode 100644 swift/Arrow/Tests/ArrowTests/IPCTests.swift delete mode 100644 swift/Arrow/Tests/ArrowTests/RecordBatchTests.swift delete mode 100644 swift/Arrow/Tests/ArrowTests/TableTests.swift delete mode 100644 swift/ArrowFlight/.gitignore delete mode 100644 swift/ArrowFlight/Package.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/Flight.pb.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightAction.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightActionType.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightClient.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightCriteria.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightData.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightDescriptor.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightEndpoint.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightInfo.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightLocation.swift delete mode 100644 
swift/ArrowFlight/Sources/ArrowFlight/FlightPutResult.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightResult.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightSchemaResult.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightServer.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightSql.pb.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/FlightTicket.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamReader.swift delete mode 100644 swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamWriter.swift delete mode 100644 swift/ArrowFlight/Tests/ArrowFlightTests/FlightTest.swift delete mode 100644 swift/CDataWGo/.gitignore delete mode 100644 swift/CDataWGo/Package.swift delete mode 100644 swift/CDataWGo/Sources/go-swift/CDataTest.swift delete mode 100644 swift/CDataWGo/go.mod delete mode 100644 swift/CDataWGo/go.sum delete mode 100644 swift/CDataWGo/include/go_swift.h delete mode 100644 swift/CDataWGo/main.go delete mode 100644 swift/data-generator/swift-datagen/go.mod delete mode 100644 swift/data-generator/swift-datagen/go.sum delete mode 100644 swift/data-generator/swift-datagen/main.go delete mode 100755 swift/gen-flatbuffers.sh delete mode 100755 swift/gen-protobuffers.sh diff --git a/.github/workflows/swift.yml b/.github/workflows/swift.yml deleted file mode 100644 index 2e1268f52d6..00000000000 --- a/.github/workflows/swift.yml +++ /dev/null @@ -1,89 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -name: Swift - -on: - push: - branches: - - '**' - - '!dependabot/**' - tags: - - '**' - paths: - - '.dockerignore' - - '.github/workflows/swift.yml' - - 'ci/docker/*swift*' - - 'ci/scripts/swift_*' - - 'docker-compose.yml' - - 'swift/**' - pull_request: - paths: - - '.dockerignore' - - '.github/workflows/swift.yml' - - 'ci/docker/*swift*' - - 'ci/scripts/swift_*' - - 'docker-compose.yml' - - 'swift/**' - -concurrency: - group: ${{ github.repository }}-${{ github.head_ref || github.sha }}-${{ github.workflow }} - cancel-in-progress: true - -permissions: - contents: read - -env: - ARCHERY_DEBUG: 1 - DOCKER_VOLUME_PREFIX: ".docker/" - -jobs: - docker: - name: AMD 64 Ubuntu Swift 5.10 - runs-on: ubuntu-latest - if: ${{ !contains(github.event.pull_request.title, 'WIP') }} - timeout-minutes: 15 - steps: - - name: Checkout Arrow - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4.2.2 - with: - fetch-depth: 0 - submodules: recursive - - name: Setup Python on hosted runner - uses: actions/setup-python@a26af69be951a213d495a4c3e4e4022e16d87065 # v5.6.0 - with: - python-version: 3 - - name: Setup Archery - run: pip install -e dev/archery[docker] - - name: Execute Docker Build - env: - ARCHERY_DOCKER_USER: ${{ secrets.DOCKERHUB_USER }} - ARCHERY_DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - run: | - source ci/scripts/util_enable_core_dumps.sh - archery docker run ubuntu-swift - - name: Docker Push - if: >- - success() && - github.event_name == 'push' && - github.repository == 'apache/arrow' && - github.ref_name == 'main' - env: - ARCHERY_DOCKER_USER: ${{ secrets.DOCKERHUB_USER }} - ARCHERY_DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }} - continue-on-error: true - run: archery docker push ubuntu-swift diff --git a/swift/.editorconfig b/swift/.editorconfig deleted file mode 100644 index ffaf7e24024..00000000000 --- a/swift/.editorconfig +++ /dev/null @@ -1,28 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# This is an EditorConfig file: https://editorconfig.org/ - -# See ../.editorconfig for inherited values - -[*.{c,cc,cpp,h,hh,hpp}] -indent_size = 4 -indent_style = space - -[*.swift] -indent_size = 4 -indent_style = space diff --git a/swift/.swiftlint.yml b/swift/.swiftlint.yml deleted file mode 100644 index 7e4da29f374..00000000000 --- a/swift/.swiftlint.yml +++ /dev/null @@ -1,38 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. 
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -included: - - Arrow/Package.swift - - Arrow/Sources - - Arrow/Tests - - ArrowFlight/Package.swift - - ArrowFlight/Sources - - ArrowFlight/Tests - - CDataWGo/Package.swift - - CDataWGo/Sources/go-swift -excluded: - - Arrow/Sources/Arrow/File_generated.swift - - Arrow/Sources/Arrow/Message_generated.swift - - Arrow/Sources/Arrow/Schema_generated.swift - - Arrow/Sources/Arrow/SparseTensor_generated.swift - - Arrow/Sources/Arrow/Tensor_generated.swift - - ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift - - ArrowFlight/Sources/ArrowFlight/Flight.pb.swift - - ArrowFlight/Sources/ArrowFlight/FlightSql.pb.swift -identifier_name: - min_length: 2 # only warning -allow_zero_lintable_files: false diff --git a/swift/Arrow/.gitignore b/swift/Arrow/.gitignore deleted file mode 100644 index d561187385c..00000000000 --- a/swift/Arrow/.gitignore +++ /dev/null @@ -1,9 +0,0 @@ -.DS_Store -/.build -/Packages -/*.xcodeproj -xcuserdata/ -DerivedData/ -.swiftpm/ -.netrc -Package.resolved \ No newline at end of file diff --git a/swift/Arrow/Package.swift b/swift/Arrow/Package.swift deleted file mode 100644 index d9e328f8eea..00000000000 --- a/swift/Arrow/Package.swift +++ /dev/null @@ -1,67 +0,0 @@ -// swift-tools-version: 5.10 -// The swift-tools-version declares the minimum version of Swift required to build this package. - -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -import PackageDescription - -let package = Package( - name: "Arrow", - platforms: [ - .macOS(.v10_14) - ], - products: [ - .library( - name: "Arrow", - targets: ["Arrow"]) - ], - dependencies: [ - .package(url: "https://github.com/google/flatbuffers.git", branch: "v25.2.10"), - .package( - url: "https://github.com/apple/swift-atomics.git", - .upToNextMajor(from: "1.2.0") // or `.upToNextMinor - ) - ], - targets: [ - .target( - name: "ArrowC", - path: "Sources/ArrowC", - swiftSettings: [ - // build: .unsafeFlags(["-warnings-as-errors"]) - ] - - ), - .target( - name: "Arrow", - dependencies: ["ArrowC", - .product(name: "FlatBuffers", package: "flatbuffers"), - .product(name: "Atomics", package: "swift-atomics") - ], - swiftSettings: [ - // build: .unsafeFlags(["-warnings-as-errors"]) - ] - ), - .testTarget( - name: "ArrowTests", - dependencies: ["Arrow", "ArrowC"], - swiftSettings: [ - // build: .unsafeFlags(["-warnings-as-errors"]) - ] - ) - ] -) diff --git a/swift/Arrow/README.md b/swift/Arrow/README.md deleted file mode 100644 index 3acded89e1f..00000000000 --- a/swift/Arrow/README.md +++ /dev/null @@ -1,56 +0,0 @@ - - -# Apache Arrow Swift - -An implementation of Arrow targeting Swift. - -## Status - -## Memory Management - -- Allocations are 64-byte aligned and padded to 8-bytes. -- Allocations are automatically garbage collected - -## Arrays - -### Primitive Types - -- Int8, Int16, Int32, Int64 -- UInt8, UInt16, UInt32, UInt64 -- Float, Double -- String (utf-8) - -### Parametric Types - -- Date32 -- Date64 - -### Type Metadata - -- Data Types -- Fields -- Schema - -## Test data generation - -Test data files for the reader tests are generated by an executable built in go whose source is included in the data-generator directory. -```sh -$ go build -o swift-datagen -``` diff --git a/swift/Arrow/Sources/Arrow/ArrowArray.swift b/swift/Arrow/Sources/Arrow/ArrowArray.swift deleted file mode 100644 index 4fc1b8b9fc7..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowArray.swift +++ /dev/null @@ -1,327 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
- -import Foundation - -public protocol ArrowArrayHolder { - var type: ArrowType {get} - var length: UInt {get} - var nullCount: UInt {get} - var array: AnyArray {get} - var data: ArrowData {get} - var getBufferData: () -> [Data] {get} - var getBufferDataSizes: () -> [Int] {get} - var getArrowColumn: (ArrowField, [ArrowArrayHolder]) throws -> ArrowColumn {get} -} - -public class ArrowArrayHolderImpl: ArrowArrayHolder { - public let data: ArrowData - public let type: ArrowType - public let length: UInt - public let nullCount: UInt - public let array: AnyArray - public let getBufferData: () -> [Data] - public let getBufferDataSizes: () -> [Int] - public let getArrowColumn: (ArrowField, [ArrowArrayHolder]) throws -> ArrowColumn - public init(_ arrowArray: ArrowArray) { - self.array = arrowArray - self.data = arrowArray.arrowData - self.length = arrowArray.length - self.type = arrowArray.arrowData.type - self.nullCount = arrowArray.nullCount - self.getBufferData = {() -> [Data] in - var bufferData = [Data]() - for buffer in arrowArray.arrowData.buffers { - bufferData.append(Data()) - buffer.append(to: &bufferData[bufferData.count - 1]) - } - - return bufferData - } - - self.getBufferDataSizes = {() -> [Int] in - var bufferDataSizes = [Int]() - for buffer in arrowArray.arrowData.buffers { - bufferDataSizes.append(Int(buffer.capacity)) - } - - return bufferDataSizes - } - - self.getArrowColumn = {(field: ArrowField, arrayHolders: [ArrowArrayHolder]) throws -> ArrowColumn in - var arrays = [ArrowArray]() - for arrayHolder in arrayHolders { - if let array = arrayHolder.array as? ArrowArray { - arrays.append(array) - } - } - - return ArrowColumn(field, chunked: ChunkedArrayHolder(try ChunkedArray(arrays))) - } - } - - public static func loadArray( // swiftlint:disable:this cyclomatic_complexity - _ arrowType: ArrowType, with: ArrowData) throws -> ArrowArrayHolder { - switch arrowType.id { - case .int8: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .int16: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .int32: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .int64: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .uint8: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .uint16: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .uint32: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .uint64: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .double: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .float: - return try ArrowArrayHolderImpl(FixedArray(with)) - case .date32: - return try ArrowArrayHolderImpl(Date32Array(with)) - case .date64: - return try ArrowArrayHolderImpl(Date64Array(with)) - case .time32: - return try ArrowArrayHolderImpl(Time32Array(with)) - case .time64: - return try ArrowArrayHolderImpl(Time64Array(with)) - case .string: - return try ArrowArrayHolderImpl(StringArray(with)) - case .boolean: - return try ArrowArrayHolderImpl(BoolArray(with)) - case .binary: - return try ArrowArrayHolderImpl(BinaryArray(with)) - case .strct: - return try ArrowArrayHolderImpl(StructArray(with)) - default: - throw ArrowError.invalid("Array not found for type: \(arrowType)") - } - } -} - -public class ArrowArray: AsString, AnyArray { - public typealias ItemType = T - public let arrowData: ArrowData - public var nullCount: UInt {return self.arrowData.nullCount} - public var length: UInt {return self.arrowData.length} - - public required init(_ arrowData: ArrowData) throws { - self.arrowData = arrowData - } - 
- public func isNull(_ at: UInt) throws -> Bool { - if at >= self.length { - throw ArrowError.outOfBounds(index: Int64(at)) - } - - return self.arrowData.isNull(at) - } - - public subscript(_ index: UInt) -> T? { - fatalError("subscript() has not been implemented") - } - - public func asString(_ index: UInt) -> String { - if self[index] == nil { - return "" - } - - return "\(self[index]!)" - } - - public func asAny(_ index: UInt) -> Any? { - if self[index] == nil { - return nil - } - - return self[index]! - } -} - -public class FixedArray: ArrowArray { - public override subscript(_ index: UInt) -> T? { - if self.arrowData.isNull(index) { - return nil - } - - let byteOffset = self.arrowData.stride * Int(index) - return self.arrowData.buffers[1].rawPointer.advanced(by: byteOffset).load(as: T.self) - } -} - -public class StringArray: ArrowArray { - public override subscript(_ index: UInt) -> String? { - let offsetIndex = MemoryLayout.stride * Int(index) - if self.arrowData.isNull(index) { - return nil - } - - let offsets = self.arrowData.buffers[1] - let values = self.arrowData.buffers[2] - - var startIndex: Int32 = 0 - if index > 0 { - startIndex = offsets.rawPointer.advanced(by: offsetIndex).load(as: Int32.self) - } - - let endIndex = offsets.rawPointer.advanced(by: offsetIndex + MemoryLayout.stride ) - .load(as: Int32.self) - let arrayLength = Int(endIndex - startIndex) - let rawPointer = values.rawPointer.advanced(by: Int(startIndex)) - .bindMemory(to: UInt8.self, capacity: arrayLength) - let buffer = UnsafeBufferPointer(start: rawPointer, count: arrayLength) - let byteArray = Array(buffer) - return String(data: Data(byteArray), encoding: .utf8) - } -} - -public class BoolArray: ArrowArray { - public override subscript(_ index: UInt) -> Bool? { - if self.arrowData.isNull(index) { - return nil - } - - let valueBuffer = self.arrowData.buffers[1] - return BitUtility.isSet(index, buffer: valueBuffer) - } -} - -public class Date32Array: ArrowArray { - public override subscript(_ index: UInt) -> Date? { - if self.arrowData.isNull(index) { - return nil - } - - let byteOffset = self.arrowData.stride * Int(index) - let milliseconds = self.arrowData.buffers[1].rawPointer.advanced(by: byteOffset).load(as: UInt32.self) - return Date(timeIntervalSince1970: TimeInterval(milliseconds * 86400)) - } -} - -public class Date64Array: ArrowArray { - public override subscript(_ index: UInt) -> Date? { - if self.arrowData.isNull(index) { - return nil - } - - let byteOffset = self.arrowData.stride * Int(index) - let milliseconds = self.arrowData.buffers[1].rawPointer.advanced(by: byteOffset).load(as: UInt64.self) - return Date(timeIntervalSince1970: TimeInterval(milliseconds / 1000)) - } -} - -public class Time32Array: FixedArray {} -public class Time64Array: FixedArray {} - -public class BinaryArray: ArrowArray { - public struct Options { - public var printAsHex = false - public var printEncoding: String.Encoding = .utf8 - } - - public var options = Options() - - public override subscript(_ index: UInt) -> Data? 
{ - let offsetIndex = MemoryLayout.stride * Int(index) - if self.arrowData.isNull(index) { - return nil - } - - let offsets = self.arrowData.buffers[1] - let values = self.arrowData.buffers[2] - var startIndex: Int32 = 0 - if index > 0 { - startIndex = offsets.rawPointer.advanced(by: offsetIndex).load(as: Int32.self) - } - - let endIndex = offsets.rawPointer.advanced(by: offsetIndex + MemoryLayout.stride ) - .load(as: Int32.self) - let arrayLength = Int(endIndex - startIndex) - let rawPointer = values.rawPointer.advanced(by: Int(startIndex)) - .bindMemory(to: UInt8.self, capacity: arrayLength) - let buffer = UnsafeBufferPointer(start: rawPointer, count: arrayLength) - let byteArray = Array(buffer) - return Data(byteArray) - } - - public override func asString(_ index: UInt) -> String { - if self[index] == nil {return ""} - let data = self[index]! - if options.printAsHex { - return data.hexEncodedString() - } else { - return String(data: data, encoding: .utf8)! - } - } -} - -public class StructArray: ArrowArray<[Any?]> { - public private(set) var arrowFields: [ArrowArrayHolder]? - public required init(_ arrowData: ArrowData) throws { - try super.init(arrowData) - var fields = [ArrowArrayHolder]() - for child in arrowData.children { - fields.append(try ArrowArrayHolderImpl.loadArray(child.type, with: child)) - } - - self.arrowFields = fields - } - - public override subscript(_ index: UInt) -> [Any?]? { - if self.arrowData.isNull(index) { - return nil - } - - if let fields = arrowFields { - var result = [Any?]() - for field in fields { - result.append(field.array.asAny(index)) - } - - return result - } - - return nil - } - - public override func asString(_ index: UInt) -> String { - if self.arrowData.isNull(index) { - return "" - } - - var output = "{" - if let fields = arrowFields { - for fieldIndex in 0.. ArrowArrayHolder - func appendAny(_ val: Any?) -} - -public class ArrowArrayBuilder>: ArrowArrayHolderBuilder { - let type: ArrowType - let bufferBuilder: T - public var length: UInt {return self.bufferBuilder.length} - public var capacity: UInt {return self.bufferBuilder.capacity} - public var nullCount: UInt {return self.bufferBuilder.nullCount} - public var offset: UInt {return self.bufferBuilder.offset} - - fileprivate init(_ type: ArrowType) throws { - self.type = type - self.bufferBuilder = try T() - } - - public func append(_ vals: T.ItemType?...) { - for val in vals { - self.bufferBuilder.append(val) - } - } - - public func append(_ vals: [T.ItemType?]) { - for val in vals { - self.bufferBuilder.append(val) - } - } - - public func append(_ val: T.ItemType?) { - self.bufferBuilder.append(val) - } - - public func appendAny(_ val: Any?) { - self.bufferBuilder.append(val as? 
T.ItemType) - } - - public func finish() throws -> ArrowArray { - let buffers = self.bufferBuilder.finish() - let arrowData = try ArrowData(self.type, buffers: buffers, nullCount: self.nullCount) - let array = try U(arrowData) - return array - } - - public func getStride() -> Int { - return self.type.getStride() - } - - public func toHolder() throws -> ArrowArrayHolder { - return try ArrowArrayHolderImpl(self.finish()) - } -} - -public class NumberArrayBuilder: ArrowArrayBuilder, FixedArray> { - fileprivate convenience init() throws { - try self.init(ArrowType(ArrowType.infoForNumericType(T.self))) - } -} - -public class StringArrayBuilder: ArrowArrayBuilder, StringArray> { - fileprivate convenience init() throws { - try self.init(ArrowType(ArrowType.ArrowString)) - } -} - -public class BinaryArrayBuilder: ArrowArrayBuilder, BinaryArray> { - fileprivate convenience init() throws { - try self.init(ArrowType(ArrowType.ArrowBinary)) - } -} - -public class BoolArrayBuilder: ArrowArrayBuilder { - fileprivate convenience init() throws { - try self.init(ArrowType(ArrowType.ArrowBool)) - } -} - -public class Date32ArrayBuilder: ArrowArrayBuilder { - fileprivate convenience init() throws { - try self.init(ArrowType(ArrowType.ArrowDate32)) - } -} - -public class Date64ArrayBuilder: ArrowArrayBuilder { - fileprivate convenience init() throws { - try self.init(ArrowType(ArrowType.ArrowDate64)) - } -} - -public class Time32ArrayBuilder: ArrowArrayBuilder, Time32Array> { - fileprivate convenience init(_ unit: ArrowTime32Unit) throws { - try self.init(ArrowTypeTime32(unit)) - } -} - -public class Time64ArrayBuilder: ArrowArrayBuilder, Time64Array> { - fileprivate convenience init(_ unit: ArrowTime64Unit) throws { - try self.init(ArrowTypeTime64(unit)) - } -} - -public class StructArrayBuilder: ArrowArrayBuilder { - let builders: [any ArrowArrayHolderBuilder] - let fields: [ArrowField] - public init(_ fields: [ArrowField], builders: [any ArrowArrayHolderBuilder]) throws { - self.fields = fields - self.builders = builders - try super.init(ArrowNestedType(ArrowType.ArrowStruct, fields: fields)) - self.bufferBuilder.initializeTypeInfo(fields) - } - - public init(_ fields: [ArrowField]) throws { - self.fields = fields - var builders = [any ArrowArrayHolderBuilder]() - for field in fields { - builders.append(try ArrowArrayBuilders.loadBuilder(arrowType: field.type)) - } - - self.builders = builders - try super.init(ArrowNestedType(ArrowType.ArrowStruct, fields: fields)) - } - - public override func append(_ values: [Any?]?) { - self.bufferBuilder.append(values) - if let anyValues = values { - for index in 0.. 
StructArray { - let buffers = self.bufferBuilder.finish() - var childData = [ArrowData]() - for builder in self.builders { - childData.append(try builder.toHolder().array.arrowData) - } - - let arrowData = try ArrowData(self.type, buffers: buffers, - children: childData, nullCount: self.nullCount, - length: self.length) - let structArray = try StructArray(arrowData) - return structArray - } -} - -public class ArrowArrayBuilders { - public static func loadBuilder( // swiftlint:disable:this cyclomatic_complexity - _ builderType: Any.Type) throws -> ArrowArrayHolderBuilder { - if builderType == Int8.self || builderType == Int8?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == Int16.self || builderType == Int16?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == Int32.self || builderType == Int32?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == Int64.self || builderType == Int64?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == Float.self || builderType == Float?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == UInt8.self || builderType == UInt8?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == UInt16.self || builderType == UInt16?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == UInt32.self || builderType == UInt32?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == UInt64.self || builderType == UInt64?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == Double.self || builderType == Double?.self { - return try ArrowArrayBuilders.loadNumberArrayBuilder() as NumberArrayBuilder - } else if builderType == String.self || builderType == String?.self { - return try ArrowArrayBuilders.loadStringArrayBuilder() - } else if builderType == Bool.self || builderType == Bool?.self { - return try ArrowArrayBuilders.loadBoolArrayBuilder() - } else if builderType == Date.self || builderType == Date?.self { - return try ArrowArrayBuilders.loadDate64ArrayBuilder() - } else { - throw ArrowError.invalid("Invalid type for builder: \(builderType)") - } - } - - public static func isValidBuilderType(_ type: T.Type) -> Bool { - return type == Int8?.self || type == Int16?.self || - type == Int32?.self || type == Int64?.self || - type == UInt8?.self || type == UInt16?.self || - type == UInt32?.self || type == UInt64?.self || - type == String?.self || type == Double?.self || - type == Float?.self || type == Date?.self || - type == Bool?.self || type == Bool.self || - type == Int8.self || type == Int16.self || - type == Int32.self || type == Int64.self || - type == UInt8.self || type == UInt16.self || - type == UInt32.self || type == UInt64.self || - type == String.self || type == Double.self || - type == Float.self || type == Date.self - } - - public static func loadStructArrayBuilderForType(_ obj: T) throws -> StructArrayBuilder { - let mirror = Mirror(reflecting: obj) - var builders = [ArrowArrayHolderBuilder]() - var fields = [ArrowField]() - for (property, value) in mirror.children { - guard let propertyName = property else { - continue - } - - let 
builderType = type(of: value) - let arrowType = ArrowType(ArrowType.infoForType(builderType)) - fields.append(ArrowField(propertyName, type: arrowType, isNullable: true)) - builders.append(try loadBuilder(arrowType: arrowType)) - } - - return try StructArrayBuilder(fields, builders: builders) - } - - public static func loadBuilder( // swiftlint:disable:this cyclomatic_complexity - arrowType: ArrowType) throws -> ArrowArrayHolderBuilder { - switch arrowType.id { - case .uint8: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .uint16: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .uint32: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .uint64: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .int8: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .int16: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .int32: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .int64: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .double: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .float: - return try loadNumberArrayBuilder() as NumberArrayBuilder - case .string: - return try StringArrayBuilder() - case .boolean: - return try BoolArrayBuilder() - case .binary: - return try BinaryArrayBuilder() - case .date32: - return try Date32ArrayBuilder() - case .date64: - return try Date64ArrayBuilder() - case .time32: - guard let timeType = arrowType as? ArrowTypeTime32 else { - throw ArrowError.invalid("Expected arrow type for \(arrowType.id) not found") - } - return try Time32ArrayBuilder(timeType.unit) - case .time64: - guard let timeType = arrowType as? ArrowTypeTime64 else { - throw ArrowError.invalid("Expected arrow type for \(arrowType.id) not found") - } - return try Time64ArrayBuilder(timeType.unit) - default: - throw ArrowError.unknownType("Builder not found for arrow type: \(arrowType.id)") - } - } - - public static func loadNumberArrayBuilder() throws -> NumberArrayBuilder { - let type = T.self - if type == Int8.self { - return try NumberArrayBuilder() - } else if type == Int16.self { - return try NumberArrayBuilder() - } else if type == Int32.self { - return try NumberArrayBuilder() - } else if type == Int64.self { - return try NumberArrayBuilder() - } else if type == UInt8.self { - return try NumberArrayBuilder() - } else if type == UInt16.self { - return try NumberArrayBuilder() - } else if type == UInt32.self { - return try NumberArrayBuilder() - } else if type == UInt64.self { - return try NumberArrayBuilder() - } else if type == Float.self { - return try NumberArrayBuilder() - } else if type == Double.self { - return try NumberArrayBuilder() - } else { - throw ArrowError.unknownType("Type is invalid for NumberArrayBuilder") - } - } - - public static func loadStringArrayBuilder() throws -> StringArrayBuilder { - return try StringArrayBuilder() - } - - public static func loadBoolArrayBuilder() throws -> BoolArrayBuilder { - return try BoolArrayBuilder() - } - - public static func loadDate32ArrayBuilder() throws -> Date32ArrayBuilder { - return try Date32ArrayBuilder() - } - - public static func loadDate64ArrayBuilder() throws -> Date64ArrayBuilder { - return try Date64ArrayBuilder() - } - - public static func loadBinaryArrayBuilder() throws -> BinaryArrayBuilder { - return try BinaryArrayBuilder() - } - - public static func loadTime32ArrayBuilder(_ unit: ArrowTime32Unit) throws -> Time32ArrayBuilder { - return try 
Time32ArrayBuilder(unit) - } - - public static func loadTime64ArrayBuilder(_ unit: ArrowTime64Unit) throws -> Time64ArrayBuilder { - return try Time64ArrayBuilder(unit) - } -} diff --git a/swift/Arrow/Sources/Arrow/ArrowBuffer.swift b/swift/Arrow/Sources/Arrow/ArrowBuffer.swift deleted file mode 100644 index 1ff53cd7dd5..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowBuffer.swift +++ /dev/null @@ -1,88 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -public class ArrowBuffer { - static let minLength: UInt = 1 << 5 - static let maxLength = UInt.max - fileprivate(set) var length: UInt - let capacity: UInt - public let rawPointer: UnsafeMutableRawPointer - let isMemoryOwner: Bool - - init(length: UInt, capacity: UInt, rawPointer: UnsafeMutableRawPointer, isMemoryOwner: Bool = true) { - self.length = length - self.capacity = capacity - self.rawPointer = rawPointer - self.isMemoryOwner = isMemoryOwner - } - - deinit { - if isMemoryOwner { - self.rawPointer.deallocate() - } - } - - func append(to data: inout Data) { - let ptr = UnsafePointer(rawPointer.assumingMemoryBound(to: UInt8.self)) - data.append(ptr, count: Int(capacity)) - } - - static func createEmptyBuffer() -> ArrowBuffer { - return ArrowBuffer( - length: 0, - capacity: 0, - rawPointer: UnsafeMutableRawPointer.allocate(byteCount: 0, alignment: .zero)) - } - - static func createBuffer(_ data: [UInt8], length: UInt) -> ArrowBuffer { - let byteCount = UInt(data.count) - let capacity = alignTo64(byteCount) - let memory = MemoryAllocator(64) - let rawPointer = memory.allocateArray(Int(capacity)) - rawPointer.copyMemory(from: data, byteCount: data.count) - return ArrowBuffer(length: length, capacity: capacity, rawPointer: rawPointer) - } - - static func createBuffer(_ length: UInt, size: UInt, doAlign: Bool = true) -> ArrowBuffer { - let actualLen = max(length, ArrowBuffer.minLength) - let byteCount = size * actualLen - var capacity = byteCount - if doAlign { - capacity = alignTo64(byteCount) - } - - let memory = MemoryAllocator(64) - let rawPointer = memory.allocateArray(Int(capacity)) - rawPointer.initializeMemory(as: UInt8.self, repeating: 0, count: Int(capacity)) - return ArrowBuffer(length: length, capacity: capacity, rawPointer: rawPointer) - } - - static func copyCurrent(_ from: ArrowBuffer, to: inout ArrowBuffer, len: UInt) { - to.rawPointer.copyMemory(from: from.rawPointer, byteCount: Int(len)) - } - - private static func alignTo64(_ length: UInt) -> UInt { - let bufAlignment = length % 64 - if bufAlignment != 0 { - return length + (64 - bufAlignment) + 8 - } - - return length + 8 - } -} diff --git a/swift/Arrow/Sources/Arrow/ArrowBufferBuilder.swift b/swift/Arrow/Sources/Arrow/ArrowBufferBuilder.swift deleted file mode 100644 index 
47f9c40354b..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowBufferBuilder.swift +++ /dev/null @@ -1,381 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -public protocol ArrowBufferBuilder { - associatedtype ItemType - var capacity: UInt {get} - var length: UInt {get} - var nullCount: UInt {get} - var offset: UInt {get} - init() throws - func append(_ newValue: ItemType?) - func isNull(_ index: UInt) -> Bool - func resize(_ length: UInt) - func finish() -> [ArrowBuffer] -} - -public class BaseBufferBuilder { - var nulls: ArrowBuffer - public var offset: UInt = 0 - public var capacity: UInt {return self.nulls.capacity} - public var length: UInt = 0 - public var nullCount: UInt = 0 - - init(_ nulls: ArrowBuffer) { - self.nulls = nulls - } - - public func isNull(_ index: UInt) -> Bool { - return self.nulls.length == 0 || BitUtility.isSet(index + self.offset, buffer: self.nulls) - } - - func resizeLength(_ data: ArrowBuffer, len: UInt = 0) -> UInt { - if len == 0 || len < data.length * 2 { - if data.length == 0 || data.length * 2 < ArrowBuffer.minLength { - return ArrowBuffer.minLength - } - return UInt(data.length * 2) - } - - return UInt(len * 2) - } -} - -public class ValuesBufferBuilder: BaseBufferBuilder { - var values: ArrowBuffer - var stride: Int - public override var capacity: UInt {return self.values.capacity} - - init(values: ArrowBuffer, nulls: ArrowBuffer, stride: Int = MemoryLayout.stride) { - self.stride = stride - self.values = values - super.init(nulls) - } -} - -public class FixedBufferBuilder: ValuesBufferBuilder, ArrowBufferBuilder { - public typealias ItemType = T - private let defaultVal: ItemType - public required init() throws { - self.defaultVal = try FixedBufferBuilder.defaultValueForType() - let values = ArrowBuffer.createBuffer(0, size: UInt(MemoryLayout.stride)) - let nulls = ArrowBuffer.createBuffer(0, size: UInt(MemoryLayout.stride)) - super.init(values: values, nulls: nulls) - } - - public func append(_ newValue: ItemType?) 
{ - let index = UInt(self.length) - let byteIndex = self.stride * Int(index) - self.length += 1 - if length > self.values.length { - self.resize(length) - } - - if let val = newValue { - BitUtility.setBit(index + self.offset, buffer: self.nulls) - self.values.rawPointer.advanced(by: byteIndex).storeBytes(of: val, as: T.self) - } else { - self.nullCount += 1 - BitUtility.clearBit(index + self.offset, buffer: self.nulls) - self.values.rawPointer.advanced(by: byteIndex).storeBytes(of: defaultVal, as: T.self) - } - } - - public func resize(_ length: UInt) { - if length > self.values.length { - let resizeLength = resizeLength(self.values) - var values = ArrowBuffer.createBuffer(resizeLength, size: UInt(MemoryLayout.size)) - var nulls = ArrowBuffer.createBuffer(resizeLength/8 + 1, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.values, to: &values, len: self.values.capacity) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: self.nulls.capacity) - self.values = values - self.nulls = nulls - } - } - - public func finish() -> [ArrowBuffer] { - let length = self.length - var values = ArrowBuffer.createBuffer(length, size: UInt(MemoryLayout.size)) - var nulls = ArrowBuffer.createBuffer(length/8 + 1, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.values, to: &values, len: values.capacity) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: nulls.capacity) - return [nulls, values] - } - - fileprivate static func defaultValueForType() throws -> T { - let type = T.self - if type == Int8.self { - return Int8(0) as! T // swiftlint:disable:this force_cast - } else if type == Int16.self { - return Int16(0) as! T // swiftlint:disable:this force_cast - } else if type == Int32.self { - return Int32(0) as! T // swiftlint:disable:this force_cast - } else if type == Int64.self { - return Int64(0) as! T // swiftlint:disable:this force_cast - } else if type == UInt8.self { - return UInt8(0) as! T // swiftlint:disable:this force_cast - } else if type == UInt16.self { - return UInt16(0) as! T // swiftlint:disable:this force_cast - } else if type == UInt32.self { - return UInt32(0) as! T // swiftlint:disable:this force_cast - } else if type == UInt64.self { - return UInt64(0) as! T // swiftlint:disable:this force_cast - } else if type == Float.self { - return Float(0) as! T // swiftlint:disable:this force_cast - } else if type == Double.self { - return Double(0) as! T // swiftlint:disable:this force_cast - } - - throw ArrowError.unknownType("Unable to determine default value") - } -} - -public class BoolBufferBuilder: ValuesBufferBuilder, ArrowBufferBuilder { - public typealias ItemType = Bool - public required init() throws { - let values = ArrowBuffer.createBuffer(0, size: UInt(MemoryLayout.stride)) - let nulls = ArrowBuffer.createBuffer(0, size: UInt(MemoryLayout.stride)) - super.init(values: values, nulls: nulls) - } - - public func append(_ newValue: ItemType?) 
{ - let index = UInt(self.length) - self.length += 1 - if (length/8) > self.values.length { - self.resize(length) - } - - if newValue != nil { - BitUtility.setBit(index + self.offset, buffer: self.nulls) - if newValue == true { - BitUtility.setBit(index + self.offset, buffer: self.values) - } else { - BitUtility.clearBit(index + self.offset, buffer: self.values) - } - - } else { - self.nullCount += 1 - BitUtility.clearBit(index + self.offset, buffer: self.nulls) - BitUtility.clearBit(index + self.offset, buffer: self.values) - } - } - - public func resize(_ length: UInt) { - if (length/8) > self.values.length { - let resizeLength = resizeLength(self.values) - var values = ArrowBuffer.createBuffer(resizeLength, size: UInt(MemoryLayout.size)) - var nulls = ArrowBuffer.createBuffer(resizeLength, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.values, to: &values, len: self.values.capacity) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: self.nulls.capacity) - self.values = values - self.nulls = nulls - } - } - - public func finish() -> [ArrowBuffer] { - let length = self.length - var values = ArrowBuffer.createBuffer(length, size: UInt(MemoryLayout.size)) - var nulls = ArrowBuffer.createBuffer(length, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.values, to: &values, len: values.capacity) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: nulls.capacity) - return [nulls, values] - } -} - -public class VariableBufferBuilder: ValuesBufferBuilder, ArrowBufferBuilder { - public typealias ItemType = T - var offsets: ArrowBuffer - let binaryStride = MemoryLayout.stride - public required init() throws { - let values = ArrowBuffer.createBuffer(0, size: UInt(binaryStride)) - let nulls = ArrowBuffer.createBuffer(0, size: UInt(binaryStride)) - self.offsets = ArrowBuffer.createBuffer(0, size: UInt(MemoryLayout.stride)) - super.init(values: values, nulls: nulls, stride: binaryStride) - } - - public func append(_ newValue: ItemType?) { - let index = UInt(self.length) - self.length += 1 - let offsetIndex = MemoryLayout.stride * Int(index) - if self.length >= self.offsets.length { - self.resize(UInt( self.offsets.length + 1)) - } - var binData: Data - var isNull = false - if let val = newValue { - binData = getBytesFor(val)! - } else { - var nullVal = 0 - isNull = true - binData = Data(bytes: &nullVal, count: MemoryLayout.size) - } - - var currentIndex: Int32 = 0 - var currentOffset: Int32 = Int32(binData.count) - if index > 0 { - currentIndex = self.offsets.rawPointer.advanced(by: offsetIndex).load(as: Int32.self) - currentOffset += currentIndex - if currentOffset > self.values.length { - self.value_resize(UInt(currentOffset)) - } - } - - if isNull { - self.nullCount += 1 - BitUtility.clearBit(index + self.offset, buffer: self.nulls) - } else { - BitUtility.setBit(index + self.offset, buffer: self.nulls) - } - - binData.withUnsafeBytes { bufferPointer in - let rawPointer = bufferPointer.baseAddress! 
- self.values.rawPointer.advanced(by: Int(currentIndex)) - .copyMemory(from: rawPointer, byteCount: binData.count) - } - - self.offsets.rawPointer.advanced(by: (offsetIndex + MemoryLayout.stride)) - .storeBytes(of: currentOffset, as: Int32.self) - } - - public func value_resize(_ length: UInt) { - if length > self.values.length { - let resizeLength = resizeLength(self.values, len: length) - var values = ArrowBuffer.createBuffer(resizeLength, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.values, to: &values, len: self.values.capacity) - self.values = values - } - } - - public func resize(_ length: UInt) { - if length > self.offsets.length { - let resizeLength = resizeLength(self.offsets, len: length) - var nulls = ArrowBuffer.createBuffer(resizeLength/8 + 1, size: UInt(MemoryLayout.size)) - var offsets = ArrowBuffer.createBuffer(resizeLength, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: self.nulls.capacity) - ArrowBuffer.copyCurrent(self.offsets, to: &offsets, len: self.offsets.capacity) - self.nulls = nulls - self.offsets = offsets - } - } - - public func finish() -> [ArrowBuffer] { - let length = self.length - var values = ArrowBuffer.createBuffer(self.values.length, size: UInt(MemoryLayout.size)) - var nulls = ArrowBuffer.createBuffer(length/8 + 1, size: UInt(MemoryLayout.size)) - var offsets = ArrowBuffer.createBuffer(length, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.values, to: &values, len: values.capacity) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: nulls.capacity) - ArrowBuffer.copyCurrent(self.offsets, to: &offsets, len: offsets.capacity) - return [nulls, offsets, values] - } -} - -public class AbstractWrapperBufferBuilder: ArrowBufferBuilder { - public typealias ItemType = T - public var capacity: UInt {return self.bufferBuilder.capacity} - public var length: UInt {return self.bufferBuilder.length} - public var nullCount: UInt {return self.bufferBuilder.nullCount} - public var offset: UInt {return self.bufferBuilder.offset} - let bufferBuilder: FixedBufferBuilder - public required init() throws { - self.bufferBuilder = try FixedBufferBuilder() - } - - public func append(_ newValue: ItemType?) { - fatalError("Method is not implemented") - } - - public func isNull(_ index: UInt) -> Bool { - return self.bufferBuilder.isNull(index) - } - - public func resize(_ length: UInt) { - self.bufferBuilder.resize(length) - } - - public func finish() -> [ArrowBuffer] { - return self.bufferBuilder.finish() - } -} - -public class Date32BufferBuilder: AbstractWrapperBufferBuilder { - public override func append(_ newValue: ItemType?) { - if let val = newValue { - let daysSinceEpoch = Int32(val.timeIntervalSince1970 / 86400) - self.bufferBuilder.append(daysSinceEpoch) - } else { - self.bufferBuilder.append(nil) - } - } -} - -public class Date64BufferBuilder: AbstractWrapperBufferBuilder { - public override func append(_ newValue: ItemType?) { - if let val = newValue { - let daysSinceEpoch = Int64(val.timeIntervalSince1970 * 1000) - self.bufferBuilder.append(daysSinceEpoch) - } else { - self.bufferBuilder.append(nil) - } - } -} - -public final class StructBufferBuilder: BaseBufferBuilder, ArrowBufferBuilder { - public typealias ItemType = [Any?] - var info: ArrowNestedType? 
- public init() throws { - let nulls = ArrowBuffer.createBuffer(0, size: UInt(MemoryLayout.stride)) - super.init(nulls) - } - - public func initializeTypeInfo(_ fields: [ArrowField]) { - info = ArrowNestedType(ArrowType.ArrowStruct, fields: fields) - } - - public func append(_ newValue: [Any?]?) { - let index = UInt(self.length) - self.length += 1 - if length > self.nulls.length { - self.resize(length) - } - - if newValue != nil { - BitUtility.setBit(index + self.offset, buffer: self.nulls) - } else { - self.nullCount += 1 - BitUtility.clearBit(index + self.offset, buffer: self.nulls) - } - } - - public func resize(_ length: UInt) { - if length > self.nulls.length { - let resizeLength = resizeLength(self.nulls) - var nulls = ArrowBuffer.createBuffer(resizeLength/8 + 1, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: self.nulls.capacity) - self.nulls = nulls - } - } - - public func finish() -> [ArrowBuffer] { - let length = self.length - var nulls = ArrowBuffer.createBuffer(length/8 + 1, size: UInt(MemoryLayout.size)) - ArrowBuffer.copyCurrent(self.nulls, to: &nulls, len: nulls.capacity) - return [nulls] - } -} diff --git a/swift/Arrow/Sources/Arrow/ArrowCExporter.swift b/swift/Arrow/Sources/Arrow/ArrowCExporter.swift deleted file mode 100644 index f2dd8ab56d8..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowCExporter.swift +++ /dev/null @@ -1,141 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -import ArrowC -import Atomics - -// The memory used by UnsafeAtomic is not automatically -// reclaimed. Since this value is initialized once -// and used until the program/app is closed it's -// memory will be released on program/app exit -let exportDataCounter: UnsafeAtomic = .create(0) - -public class ArrowCExporter { - private class ExportData { - let id: Int - init() { - id = exportDataCounter.loadThenWrappingIncrement(ordering: .relaxed) - ArrowCExporter.exportedData[id] = self - } - } - - private class ExportSchema: ExportData { - public let arrowTypeNameCstr: UnsafePointer - public let nameCstr: UnsafePointer - private let arrowType: ArrowType - private let name: String - private let arrowTypeName: String - init(_ arrowType: ArrowType, name: String = "") throws { - self.arrowType = arrowType - // keeping the name str to ensure the cstring buffer remains valid - self.name = name - self.arrowTypeName = try arrowType.cDataFormatId - self.nameCstr = (self.name as NSString).utf8String! - self.arrowTypeNameCstr = (self.arrowTypeName as NSString).utf8String! 
-            super.init()
-        }
-    }
-
-    private class ExportArray: ExportData {
-        private let arrowData: ArrowData
-        private(set) var data = [UnsafeRawPointer?]()
-        private(set) var buffers: UnsafeMutablePointer
-        init(_ arrowData: ArrowData) {
-            // keep a reference to the ArrowData
-            // obj so the memory doesn't get
-            // deallocated
-            self.arrowData = arrowData
-            for arrowBuffer in arrowData.buffers {
-                self.data.append(arrowBuffer.rawPointer)
-            }
-
-            self.buffers = UnsafeMutablePointer.allocate(capacity: self.data.count)
-            self.buffers.initialize(from: &self.data, count: self.data.count)
-            super.init()
-        }
-
-        deinit {
-            self.buffers.deinitialize(count: self.data.count)
-            self.buffers.deallocate()
-        }
-    }
-
-    private static var exportedData = [Int: ExportData]()
-    public init() {}
-
-    public func exportType(_ cSchema: inout ArrowC.ArrowSchema, arrowType: ArrowType, name: String = "") ->
-        Result {
-        do {
-            let exportSchema = try ExportSchema(arrowType, name: name)
-            cSchema.format = exportSchema.arrowTypeNameCstr
-            cSchema.name = exportSchema.nameCstr
-            cSchema.private_data =
-                UnsafeMutableRawPointer(mutating: UnsafeRawPointer(bitPattern: exportSchema.id))
-            cSchema.release = {(data: UnsafeMutablePointer?) in
-                let arraySchema = data!.pointee
-                let exportId = Int(bitPattern: arraySchema.private_data)
-                guard ArrowCExporter.exportedData[exportId] != nil else {
-                    fatalError("Export schema not found with id \(exportId)")
-                }
-
-                // the data associated with this exportSchema object,
-                // which includes the C strings for the format and name,
-                // will be deallocated upon removal
-                ArrowCExporter.exportedData.removeValue(forKey: exportId)
-                ArrowC.ArrowSwiftClearReleaseSchema(data)
-            }
-        } catch {
-            return .failure(.unknownError("\(error)"))
-        }
-        return .success(true)
-    }
-
-    public func exportField(_ schema: inout ArrowC.ArrowSchema, field: ArrowField) ->
-        Result {
-        return exportType(&schema, arrowType: field.type, name: field.name)
-    }
-
-    public func exportArray(_ cArray: inout ArrowC.ArrowArray, arrowData: ArrowData) {
-        let exportArray = ExportArray(arrowData)
-        cArray.buffers = exportArray.buffers
-        cArray.length = Int64(arrowData.length)
-        cArray.null_count = Int64(arrowData.nullCount)
-        cArray.n_buffers = Int64(arrowData.buffers.count)
-        // Swift Arrow does not currently support children or dictionaries.
-        // This will need to be updated once support has been added.
-        cArray.n_children = 0
-        cArray.children = nil
-        cArray.dictionary = nil
-        cArray.private_data =
-            UnsafeMutableRawPointer(mutating: UnsafeRawPointer(bitPattern: exportArray.id))
-        cArray.release = {(data: UnsafeMutablePointer?) in
-            let arrayData = data!.pointee
-            let exportId = Int(bitPattern: arrayData.private_data)
-            guard ArrowCExporter.exportedData[exportId] != nil else {
-                fatalError("Export data not found with id \(exportId)")
-            }
-
-            // the data associated with this exportArray object,
-            // which includes the entire arrowData object
-            // and the buffers UnsafeMutablePointer[], will
-            // be deallocated upon removal
-            ArrowCExporter.exportedData.removeValue(forKey: exportId)
-            ArrowC.ArrowSwiftClearReleaseArray(data)
-        }
-    }
-}
diff --git a/swift/Arrow/Sources/Arrow/ArrowCImporter.swift b/swift/Arrow/Sources/Arrow/ArrowCImporter.swift
deleted file mode 100644
index e65d78d730b..00000000000
--- a/swift/Arrow/Sources/Arrow/ArrowCImporter.swift
+++ /dev/null
@@ -1,180 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements.
See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -import ArrowC - -public class ImportArrayHolder: ArrowArrayHolder { - let cArrayPtr: UnsafePointer - public var type: ArrowType {self.holder.type} - public var length: UInt {self.holder.length} - public var nullCount: UInt {self.holder.nullCount} - public var array: AnyArray {self.holder.array} - public var data: ArrowData {self.holder.data} - public var getBufferData: () -> [Data] {self.holder.getBufferData} - public var getBufferDataSizes: () -> [Int] {self.holder.getBufferDataSizes} - public var getArrowColumn: (ArrowField, [ArrowArrayHolder]) throws -> ArrowColumn {self.holder.getArrowColumn} - private let holder: ArrowArrayHolder - init(_ holder: ArrowArrayHolder, cArrayPtr: UnsafePointer) { - self.cArrayPtr = cArrayPtr - self.holder = holder - } - - deinit { - if self.cArrayPtr.pointee.release != nil { - ArrowCImporter.release(self.cArrayPtr) - } - } -} - -public class ArrowCImporter { - private func appendToBuffer( - _ cBuffer: UnsafeRawPointer?, - arrowBuffers: inout [ArrowBuffer], - length: UInt) { - if cBuffer == nil { - arrowBuffers.append(ArrowBuffer.createEmptyBuffer()) - return - } - - let pointer = UnsafeMutableRawPointer(mutating: cBuffer)! 
-        arrowBuffers.append(
-            ArrowBuffer(length: length, capacity: length, rawPointer: pointer, isMemoryOwner: false))
-    }
-
-    public init() {}
-
-    public func importType(_ cArrow: String, name: String = "") ->
-        Result {
-        do {
-            let type = try ArrowType.fromCDataFormatId(cArrow)
-            return .success(ArrowField(name, type: ArrowType(type.info), isNullable: true))
-        } catch {
-            return .failure(.invalid("Error occurred while attempting to import type: \(error)"))
-        }
-    }
-
-    public func importField(_ cSchema: ArrowC.ArrowSchema) ->
-        Result {
-        if cSchema.n_children > 0 {
-            ArrowCImporter.release(cSchema)
-            return .failure(.invalid("Children currently not supported"))
-        } else if cSchema.dictionary != nil {
-            ArrowCImporter.release(cSchema)
-            return .failure(.invalid("Dictionary types currently not supported"))
-        }
-
-        switch importType(
-            String(cString: cSchema.format), name: String(cString: cSchema.name)) {
-        case .success(let field):
-            ArrowCImporter.release(cSchema)
-            return .success(field)
-        case .failure(let err):
-            ArrowCImporter.release(cSchema)
-            return .failure(err)
-        }
-    }
-
-    public func importArray(
-        _ cArray: UnsafePointer,
-        arrowType: ArrowType,
-        isNullable: Bool = false
-    ) -> Result {
-        let arrowField = ArrowField("", type: arrowType, isNullable: isNullable)
-        return importArray(cArray, arrowField: arrowField)
-    }
-
-    public func importArray( // swiftlint:disable:this cyclomatic_complexity function_body_length
-        _ cArrayPtr: UnsafePointer,
-        arrowField: ArrowField
-    ) -> Result {
-        let cArray = cArrayPtr.pointee
-        if cArray.null_count < 0 {
-            ArrowCImporter.release(cArrayPtr)
-            return .failure(.invalid("Uncomputed null count is not supported"))
-        } else if cArray.n_children > 0 {
-            ArrowCImporter.release(cArrayPtr)
-            return .failure(.invalid("Children currently not supported"))
-        } else if cArray.dictionary != nil {
-            ArrowCImporter.release(cArrayPtr)
-            return .failure(.invalid("Dictionary types currently not supported"))
-        } else if cArray.offset != 0 {
-            ArrowCImporter.release(cArrayPtr)
-            return .failure(.invalid("Offset of 0 is required but found offset: \(cArray.offset)"))
-        }
-
-        let arrowType = arrowField.type
-        let length = UInt(cArray.length)
-        let nullCount = UInt(cArray.null_count)
-        var arrowBuffers = [ArrowBuffer]()
-
-        if cArray.n_buffers > 0 {
-            if cArray.buffers == nil {
-                ArrowCImporter.release(cArrayPtr)
-                return .failure(.invalid("C array buffers is nil"))
-            }
-
-            switch arrowType.info {
-            case .variableInfo:
-                if cArray.n_buffers != 3 {
-                    ArrowCImporter.release(cArrayPtr)
-                    return .failure(
-                        .invalid("Variable buffer count expected 3 but found \(cArray.n_buffers)"))
-                }
-
-                appendToBuffer(cArray.buffers[0], arrowBuffers: &arrowBuffers, length: UInt(ceil(Double(length) / 8)))
-                appendToBuffer(cArray.buffers[1], arrowBuffers: &arrowBuffers, length: length)
-                let lastOffsetLength = cArray.buffers[1]!
-                    .advanced(by: Int(length) * MemoryLayout.stride)
-                    .load(as: Int32.self)
-                appendToBuffer(cArray.buffers[2], arrowBuffers: &arrowBuffers, length: UInt(lastOffsetLength))
-            default:
-                if cArray.n_buffers != 2 {
-                    ArrowCImporter.release(cArrayPtr)
-                    return .failure(.invalid("Expected buffer count 2 but found \(cArray.n_buffers)"))
-                }
-
-                appendToBuffer(cArray.buffers[0], arrowBuffers: &arrowBuffers, length: UInt(ceil(Double(length) / 8)))
-                appendToBuffer(cArray.buffers[1], arrowBuffers: &arrowBuffers, length: length)
-            }
-        }
-
-        switch makeArrayHolder(arrowField, buffers: arrowBuffers,
-                               nullCount: nullCount, children: nil, rbLength: 0) {
-        case .success(let holder):
-            return .success(ImportArrayHolder(holder, cArrayPtr: cArrayPtr))
-        case .failure(let err):
-            ArrowCImporter.release(cArrayPtr)
-            return .failure(err)
-        }
-    }
-
-    public static func release(_ cArrayPtr: UnsafePointer) {
-        if cArrayPtr.pointee.release != nil {
-            let cArrayMutablePtr = UnsafeMutablePointer(mutating: cArrayPtr)
-            cArrayPtr.pointee.release(cArrayMutablePtr)
-        }
-    }
-
-    public static func release(_ cSchema: ArrowC.ArrowSchema) {
-        if cSchema.release != nil {
-            let cSchemaPtr = UnsafeMutablePointer.allocate(capacity: 1)
-            cSchemaPtr.initialize(to: cSchema)
-            cSchema.release(cSchemaPtr)
-        }
-    }
-}
diff --git a/swift/Arrow/Sources/Arrow/ArrowData.swift b/swift/Arrow/Sources/Arrow/ArrowData.swift
deleted file mode 100644
index 2728b9fc8b6..00000000000
--- a/swift/Arrow/Sources/Arrow/ArrowData.swift
+++ /dev/null
@@ -1,67 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
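For orientation, a minimal sketch of the export/import round trip that `ArrowCExporter` and `ArrowCImporter` implement. It assumes the zero-initializing `ArrowC.ArrowSchema()` initializer that Swift synthesizes for imported C structs, and that the `ArrowType` constructors used elsewhere in this diff are visible to the caller:

import Arrow
import ArrowC

var cSchema = ArrowC.ArrowSchema()
let exporter = ArrowCExporter()
// Publish an Int32 column type through the C Data Interface.
_ = exporter.exportType(&cSchema, arrowType: ArrowType(ArrowType.ArrowInt32), name: "id")

// importField consumes cSchema and invokes its release callback on every
// path, so the caller must not release it a second time.
switch ArrowCImporter().importField(cSchema) {
case .success(let field):
    print("imported field \(field.name)")
case .failure(let error):
    print("import failed: \(error)")
}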
- -import Foundation - -public class ArrowData { - public let type: ArrowType - public let buffers: [ArrowBuffer] - public let children: [ArrowData] - public let nullCount: UInt - public let length: UInt - public let stride: Int - - convenience init(_ arrowType: ArrowType, buffers: [ArrowBuffer], nullCount: UInt) throws { - try self.init(arrowType, buffers: buffers, - children: [ArrowData](), nullCount: nullCount, - length: buffers[1].length) - } - - init(_ arrowType: ArrowType, buffers: [ArrowBuffer], children: [ArrowData], nullCount: UInt, length: UInt) throws { - let infoType = arrowType.info - switch infoType { - case let .primitiveInfo(typeId): - if typeId == ArrowTypeId.unknown { - throw ArrowError.unknownType("Unknown primitive type for data") - } - case let .variableInfo(typeId): - if typeId == ArrowTypeId.unknown { - throw ArrowError.unknownType("Unknown variable type for data") - } - case let .timeInfo(typeId): - if typeId == ArrowTypeId.unknown { - throw ArrowError.unknownType("Unknown time type for data") - } - case let .complexInfo(typeId): - if typeId == ArrowTypeId.unknown { - throw ArrowError.unknownType("Unknown complex type for data") - } - } - - self.type = arrowType - self.buffers = buffers - self.children = children - self.nullCount = nullCount - self.length = length - self.stride = arrowType.getStride() - } - - public func isNull(_ at: UInt) -> Bool { - let nullBuffer = buffers[0] - return nullBuffer.length > 0 && !BitUtility.isSet(at, buffer: nullBuffer) - } -} diff --git a/swift/Arrow/Sources/Arrow/ArrowDecoder.swift b/swift/Arrow/Sources/Arrow/ArrowDecoder.swift deleted file mode 100644 index 35dd4dcd1e8..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowDecoder.swift +++ /dev/null @@ -1,376 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
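The validity-bitmap contract encoded by `isNull` above (buffer 0 holds the null bits, and an empty bitmap means the column has no nulls) can be exercised with a small helper; a sketch, with the `ArrowData` assumed to come from a reader or builder elsewhere in this diff:

// Count nulls by walking the validity bitmap; the result should
// agree with the nullCount recorded on the ArrowData itself.
func countNulls(_ data: ArrowData) -> UInt {
    var nulls: UInt = 0
    for index in 0..<data.length where data.isNull(index) {
        nulls += 1
    }
    return nulls
}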
- -import Foundation - -public class ArrowDecoder: Decoder { - var rbIndex: UInt = 0 - var singleRBCol: Int = 0 - public var codingPath: [CodingKey] = [] - public var userInfo: [CodingUserInfoKey: Any] = [:] - public let rb: RecordBatch - public let nameToCol: [String: ArrowArrayHolder] - public let columns: [ArrowArrayHolder] - public init(_ decoder: ArrowDecoder) { - self.userInfo = decoder.userInfo - self.codingPath = decoder.codingPath - self.rb = decoder.rb - self.columns = decoder.columns - self.nameToCol = decoder.nameToCol - self.rbIndex = decoder.rbIndex - } - - public init(_ rb: RecordBatch) { - self.rb = rb - var colMapping = [String: ArrowArrayHolder]() - var columns = [ArrowArrayHolder]() - for index in 0..(_ type: [T: U].Type) throws -> [T: U] { - var output = [T: U]() - if rb.columnCount != 2 { - throw ArrowError.invalid("RecordBatch column count of 2 is required to decode to map") - } - - for index in 0..(_ type: T.Type) throws -> [T] { - var output = [T]() - for index in 0..(keyedBy type: Key.Type - ) -> KeyedDecodingContainer where Key: CodingKey { - let container = ArrowKeyedDecoding(self, codingPath: codingPath) - return KeyedDecodingContainer(container) - } - - public func unkeyedContainer() -> UnkeyedDecodingContainer { - return ArrowUnkeyedDecoding(self, codingPath: codingPath) - } - - public func singleValueContainer() -> SingleValueDecodingContainer { - return ArrowSingleValueDecoding(self, codingPath: codingPath) - } - - func getCol(_ name: String) throws -> AnyArray { - guard let col = self.nameToCol[name] else { - throw ArrowError.invalid("Column for key \"\(name)\" not found") - } - - return col.array - } - - func getCol(_ index: Int) throws -> AnyArray { - if index >= self.columns.count { - throw ArrowError.outOfBounds(index: Int64(index)) - } - - return self.columns[index].array - } - - func doDecode(_ key: CodingKey) throws -> T? { - let array: AnyArray = try self.getCol(key.stringValue) - return array.asAny(self.rbIndex) as? T - } - - func doDecode(_ col: Int) throws -> T? { - let array: AnyArray = try self.getCol(col) - return array.asAny(self.rbIndex) as? T - } - - func isNull(_ key: CodingKey) throws -> Bool { - let array: AnyArray = try self.getCol(key.stringValue) - return array.asAny(self.rbIndex) == nil - } - - func isNull(_ col: Int) throws -> Bool { - let array: AnyArray = try self.getCol(col) - return array.asAny(self.rbIndex) == nil - } -} - -private struct ArrowUnkeyedDecoding: UnkeyedDecodingContainer { - var codingPath: [CodingKey] - var count: Int? = 0 - var isAtEnd: Bool = false - var currentIndex: Int = 0 - let decoder: ArrowDecoder - - init(_ decoder: ArrowDecoder, codingPath: [CodingKey]) { - self.decoder = decoder - self.codingPath = codingPath - self.count = self.decoder.columns.count - } - - mutating func increment() { - self.currentIndex += 1 - self.isAtEnd = self.currentIndex >= self.count! 
- } - - mutating func decodeNil() throws -> Bool { - defer {increment()} - return try self.decoder.isNull(self.currentIndex) - } - - mutating func decode(_ type: T.Type) throws -> T where T: Decodable { - if type == Int8?.self || type == Int16?.self || - type == Int32?.self || type == Int64?.self || - type == UInt8?.self || type == UInt16?.self || - type == UInt32?.self || type == UInt64?.self || - type == String?.self || type == Double?.self || - type == Float?.self || type == Date?.self || - type == Bool?.self || type == Bool.self || - type == Int8.self || type == Int16.self || - type == Int32.self || type == Int64.self || - type == UInt8.self || type == UInt16.self || - type == UInt32.self || type == UInt64.self || - type == String.self || type == Double.self || - type == Float.self || type == Date.self { - defer {increment()} - return try self.decoder.doDecode(self.currentIndex)! - } else { - throw ArrowError.invalid("Type \(type) is currently not supported") - } - } - - func nestedContainer( - keyedBy type: NestedKey.Type - ) throws -> KeyedDecodingContainer where NestedKey: CodingKey { - throw ArrowError.invalid("Nested decoding is currently not supported.") - } - - func nestedUnkeyedContainer() throws -> UnkeyedDecodingContainer { - throw ArrowError.invalid("Nested decoding is currently not supported.") - } - - func superDecoder() throws -> Decoder { - throw ArrowError.invalid("super decoding is currently not supported.") - } -} - -private struct ArrowKeyedDecoding: KeyedDecodingContainerProtocol { - var codingPath = [CodingKey]() - var allKeys = [Key]() - let decoder: ArrowDecoder - - init(_ decoder: ArrowDecoder, codingPath: [CodingKey]) { - self.decoder = decoder - self.codingPath = codingPath - } - - func contains(_ key: Key) -> Bool { - return self.decoder.nameToCol.keys.contains(key.stringValue) - } - - func decodeNil(forKey key: Key) throws -> Bool { - try self.decoder.isNull(key) - } - - func decode(_ type: Bool.Type, forKey key: Key) throws -> Bool { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: String.Type, forKey key: Key) throws -> String { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: Double.Type, forKey key: Key) throws -> Double { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: Float.Type, forKey key: Key) throws -> Float { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: Int.Type, forKey key: Key) throws -> Int { - throw ArrowError.invalid( - "Int type is not supported (please use Int8, Int16, Int32 or Int64)") - } - - func decode(_ type: Int8.Type, forKey key: Key) throws -> Int8 { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: Int16.Type, forKey key: Key) throws -> Int16 { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: Int32.Type, forKey key: Key) throws -> Int32 { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: Int64.Type, forKey key: Key) throws -> Int64 { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: UInt.Type, forKey key: Key) throws -> UInt { - throw ArrowError.invalid( - "UInt type is not supported (please use UInt8, UInt16, UInt32 or UInt64)") - } - - func decode(_ type: UInt8.Type, forKey key: Key) throws -> UInt8 { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: UInt16.Type, forKey key: Key) throws -> UInt16 { - return try self.decoder.doDecode(key)! 
- } - - func decode(_ type: UInt32.Type, forKey key: Key) throws -> UInt32 { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: UInt64.Type, forKey key: Key) throws -> UInt64 { - return try self.decoder.doDecode(key)! - } - - func decode(_ type: T.Type, forKey key: Key) throws -> T where T: Decodable { - if ArrowArrayBuilders.isValidBuilderType(type) || type == Date.self { - return try self.decoder.doDecode(key)! - } else { - throw ArrowError.invalid("Type \(type) is currently not supported") - } - } - - func nestedContainer( - keyedBy type: NestedKey.Type, - forKey key: Key - ) throws -> KeyedDecodingContainer where NestedKey: CodingKey { - throw ArrowError.invalid("Nested decoding is currently not supported.") - } - - func nestedUnkeyedContainer(forKey key: Key) throws -> UnkeyedDecodingContainer { - throw ArrowError.invalid("Nested decoding is currently not supported.") - } - - func superDecoder() throws -> Decoder { - throw ArrowError.invalid("super decoding is currently not supported.") - } - - func superDecoder(forKey key: Key) throws -> Decoder { - throw ArrowError.invalid("super decoding is currently not supported.") - } -} - -private struct ArrowSingleValueDecoding: SingleValueDecodingContainer { - var codingPath = [CodingKey]() - let decoder: ArrowDecoder - - init(_ decoder: ArrowDecoder, codingPath: [CodingKey]) { - self.decoder = decoder - self.codingPath = codingPath - } - - func decodeNil() -> Bool { - do { - return try self.decoder.isNull(self.decoder.singleRBCol) - } catch { - return false - } - } - - func decode(_ type: Bool.Type) throws -> Bool { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: String.Type) throws -> String { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: Double.Type) throws -> Double { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: Float.Type) throws -> Float { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: Int.Type) throws -> Int { - throw ArrowError.invalid( - "Int type is not supported (please use Int8, Int16, Int32 or Int64)") - } - - func decode(_ type: Int8.Type) throws -> Int8 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: Int16.Type) throws -> Int16 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: Int32.Type) throws -> Int32 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: Int64.Type) throws -> Int64 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: UInt.Type) throws -> UInt { - throw ArrowError.invalid( - "UInt type is not supported (please use UInt8, UInt16, UInt32 or UInt64)") - } - - func decode(_ type: UInt8.Type) throws -> UInt8 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: UInt16.Type) throws -> UInt16 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: UInt32.Type) throws -> UInt32 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: UInt64.Type) throws -> UInt64 { - return try self.decoder.doDecode(self.decoder.singleRBCol)! - } - - func decode(_ type: T.Type) throws -> T where T: Decodable { - if ArrowArrayBuilders.isValidBuilderType(type) || type == Date.self { - return try self.decoder.doDecode(self.decoder.singleRBCol)! 
-        } else {
-            throw ArrowError.invalid("Type \(type) is currently not supported")
-        }
-    }
-}
diff --git a/swift/Arrow/Sources/Arrow/ArrowEncoder.swift b/swift/Arrow/Sources/Arrow/ArrowEncoder.swift
deleted file mode 100644
index 8c72c0482f2..00000000000
--- a/swift/Arrow/Sources/Arrow/ArrowEncoder.swift
+++ /dev/null
@@ -1,456 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import Foundation
-
-public class ArrowEncoder: Encoder {
-    public private(set) var builders = [String: ArrowArrayHolderBuilder]()
-    private var byIndex = [String]()
-    public var codingPath: [CodingKey] = []
-    public var userInfo: [CodingUserInfoKey: Any] = [:]
-    var errorMsg: String?
-    // this is used for Dictionary types. A dictionary type
-    // will give each key and value their own index, so instead
-    // of having a 2 column RecordBatch you would have a
-    // 2 * length(dictionary) column RecordBatch, which would not
-    // be the expected output.
-    var modForIndex: Int?
-
-    public init() {}
-
-    public init(_ builders: [String: ArrowArrayHolderBuilder], byIndex: [String]) {
-        self.builders = builders
-        self.byIndex = byIndex
-    }
-
-    public static func encode(_ data: T) throws -> RecordBatch {
-        let encoder = try loadEncoder(data)
-        try data.encode(to: encoder)
-        return try encoder.finish()
-    }
-
-    public static func encode(_ rows: [T]) throws -> RecordBatch? {
-        if rows.isEmpty {
-            return nil
-        }
-
-        let encoder = try loadEncoder(rows[0])
-        for row in rows {
-            try row.encode(to: encoder)
-        }
-
-        return try encoder.finish()
-    }
-
-    static func loadEncoder(_ data: T) throws -> ArrowEncoder {
-        // this will check if T is a simple built-in type
-        // (UInt, Int, Int8, String, Date, etc...).
-        if ArrowArrayBuilders.isValidBuilderType(T.self) {
-            let builders = ["col0": try ArrowArrayBuilders.loadBuilder(T.self)]
-            return ArrowEncoder(builders, byIndex: ["col0"])
-        } else {
-            let encoder = ArrowEncoder()
-            if data is [AnyHashable: Any] {
-                encoder.modForIndex = 2
-            }
-
-            return encoder
-        }
-    }
-
-    public func finish() throws -> RecordBatch {
-        try throwIfInvalid()
-        let batchBuilder = RecordBatch.Builder()
-        for key in byIndex {
-            batchBuilder.addColumn(key, arrowArray: try builders[key]!.toHolder())
-        }
-
-        switch batchBuilder.finish() {
-        case .success(let rb):
-            return rb
-        case .failure(let error):
-            throw error
-        }
-    }
-
-    public func container(keyedBy type: Key.Type) -> KeyedEncodingContainer where Key: CodingKey {
-        var container = ArrowKeyedEncoding(self)
-        container.codingPath = codingPath
-        return KeyedEncodingContainer(container)
-    }
-
-    public func unkeyedContainer() -> UnkeyedEncodingContainer {
-        return ArrowUnkeyedEncoding(self, codingPath: self.codingPath)
-    }
-
-    public func singleValueContainer() -> SingleValueEncodingContainer {
-        return ArrowSingleValueEncoding(self, codingPath: codingPath)
-    }
-
-    func doEncodeNil(key: CodingKey) throws {
-        try throwIfInvalid()
-        guard let builder = builders[key.stringValue] else {
-            throw ArrowError.invalid("Column not found for key: \(key)")
-        }
-
-        builder.appendAny(nil)
-    }
-
-    // This is required by the keyed and unkeyed encoders as columns are
-    // added when the first row of the data is encoded. This is done due
-    // to limitations in Swift's Mirror API (ex: it is unable to correctly
-    // find the type for String? in [Int: String?])
-    @discardableResult
-    func ensureColumnExists(_ value: T, key: String) throws -> ArrowArrayHolderBuilder {
-        try throwIfInvalid()
-        var builder = builders[key]
-        if builder == nil {
-            builder = try ArrowArrayBuilders.loadBuilder(T.self)
-            builders[key] = builder
-            byIndex.append(key)
-        }
-
-        return builder!
-    }
-
-    func getIndex(_ index: Int) -> Int {
-        return self.modForIndex == nil ? index : index % self.modForIndex!
-    }
-
-    func doEncodeNil(_ keyIndex: Int) throws {
-        try throwIfInvalid()
-        let index = self.getIndex(keyIndex)
-        if index >= builders.count {
-            throw ArrowError.outOfBounds(index: Int64(index))
-        }
-
-        builders[byIndex[index]]!.appendAny(nil)
-    }
-
-    func doEncode(_ value: T, key: CodingKey) throws {
-        try throwIfInvalid()
-        let builder = try ensureColumnExists(value, key: key.stringValue)
-        builder.appendAny(value)
-    }
-
-    func doEncode(_ value: T, keyIndex: Int) throws {
-        try throwIfInvalid()
-        let index = self.getIndex(keyIndex)
-        if index >= builders.count {
-            if index == builders.count {
-                try ensureColumnExists(value, key: "col\(index)")
-            } else {
-                throw ArrowError.outOfBounds(index: Int64(index))
-            }
-        }
-
-        builders[byIndex[index]]!.appendAny(value)
-    }
-
-    func throwIfInvalid() throws {
-        if let errorMsg = self.errorMsg {
-            throw ArrowError.invalid(errorMsg)
-        }
-    }
-}
-
-private struct ArrowKeyedEncoding: KeyedEncodingContainerProtocol {
-    var codingPath: [CodingKey] = []
-    let encoder: ArrowEncoder
-    init(_ encoder: ArrowEncoder) {
-        self.encoder = encoder
-    }
-
-    // If this method is called on row 0 and the encoder is
-    // lazily building holders then this will produce an error,
-    // as this method does not know what the underlying type
-    // is for the column. This method is not called for
-    // nullable types (String?, Int32?, Date?) and the workaround
-    // for this issue would be to predefine the builders for the
-    // encoder. (I have only encountered this issue when allowing
-    // nullable types at the encode func level, which is currently
-    // not allowed.)
-    mutating func encodeNil(forKey key: Key) throws {
-        try encoder.doEncodeNil(key: key)
-    }
-
-    mutating func doEncodeIf(_ value: T?, forKey key: Key) throws {
-        if value == nil {
-            try encoder.ensureColumnExists(value, key: key.stringValue)
-            try encoder.doEncodeNil(key: key)
-        } else {
-            try encoder.doEncode(value, key: key)
-        }
-    }
-
-    mutating func encode(_ value: Bool, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: Bool?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: String, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: String?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: Double, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: Double?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: Float, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: Float?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: Int, forKey key: Key) throws {
-        throw ArrowError.invalid(
-            "Int type is not supported (please use Int8, Int16, Int32 or Int64)")
-    }
-
-    mutating func encodeIfPresent(_ value: Int?, forKey key: Key) throws {
-        throw ArrowError.invalid(
-            "Int type is not supported (please use Int8, Int16, Int32 or Int64)")
-    }
-
-    mutating func encode(_ value: Int8, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: Int8?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: Int16, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: Int16?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: Int32, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: Int32?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: Int64, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: Int64?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: UInt, forKey key: Key) throws {
-        throw ArrowError.invalid(
-            "UInt type is not supported (please use UInt8, UInt16, UInt32 or UInt64)")
-    }
-
-    mutating func encodeIfPresent(_ value: UInt?, forKey key: Key) throws {
-        throw ArrowError.invalid(
-            "UInt type is not supported (please use UInt8, UInt16, UInt32 or UInt64)")
-    }
-
-    mutating func encode(_ value: UInt8, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: UInt8?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: UInt16, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: UInt16?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: UInt32, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: UInt32?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: UInt64, forKey key: Key) throws {
-        try encoder.doEncode(value, key: key)
-    }
-
-    mutating func encodeIfPresent(_ value: UInt64?, forKey key: Key) throws {
-        try doEncodeIf(value, forKey: key)
-    }
-
-    mutating func encode(_ value: T, forKey key: Key) throws {
-        if ArrowArrayBuilders.isValidBuilderType(T.self) {
-            try encoder.doEncode(value, key: key)
-        } else {
-            throw ArrowError.invalid("Type \(T.self) is currently not supported")
-        }
-    }
-
-    mutating func encodeIfPresent(_ value: T?, forKey key: Self.Key) throws where T: Encodable {
-        if ArrowArrayBuilders.isValidBuilderType(T?.self) {
-            try doEncodeIf(value, forKey: key)
-        } else {
-            throw ArrowError.invalid("Type \(T.self) is currently not supported")
-        }
-    }
-
-    // nested container is currently not allowed. This method doesn't throw,
-    // so set an error message that will be thrown by the encoder at the next
-    // method call that throws
-    mutating func nestedContainer(
-        keyedBy keyType: NestedKey.Type,
-        forKey key: Key) -> KeyedEncodingContainer {
-        self.encoder.errorMsg = "Nested encoding is currently not supported."
-        var container = ArrowKeyedEncoding(self.encoder)
-        container.codingPath = codingPath
-        return KeyedEncodingContainer(container)
-    }
-
-    // nested container is currently not allowed. This method doesn't throw,
-    // so set an error message that will be thrown by the encoder at the next
-    // method call that throws
-    mutating func nestedUnkeyedContainer(forKey key: Key) -> UnkeyedEncodingContainer {
-        self.encoder.errorMsg = "Nested encoding is currently not supported."
-        return ArrowUnkeyedEncoding(self.encoder, codingPath: self.codingPath)
-    }
-
-    // super encoding is currently not allowed. This method doesn't throw,
-    // so set an error message that will be thrown by the encoder at the next
-    // method call that throws
-    mutating func superEncoder() -> Encoder {
-        self.encoder.errorMsg = "super encoding is currently not supported."
-        return self.encoder
-    }
-
-    // super encoding is currently not allowed. This method doesn't throw,
-    // so set an error message that will be thrown by the encoder at the next
-    // method call that throws
-    mutating func superEncoder(forKey key: Key) -> Encoder {
-        self.encoder.errorMsg = "super encoding is currently not supported."
-        return self.encoder
-    }
-}
-
-private struct ArrowUnkeyedEncoding: UnkeyedEncodingContainer {
-    public private(set) var encoder: ArrowEncoder
-    var codingPath: [CodingKey] = []
-    var currentIndex: Int
-    var count: Int = 0
-
-    init(_ encoder: ArrowEncoder, codingPath: [CodingKey], currentIndex: Int = 0) {
-        self.encoder = encoder
-        self.codingPath = codingPath
-        self.currentIndex = currentIndex
-    }
-
-    mutating func increment() {
-        self.currentIndex += 1
-    }
-
-    // If this method is called on row 0 and the encoder is
-    // lazily building holders then this will produce an error,
-    // as this method does not know what the underlying type
-    // is for the column. This method is not called for
-    // nullable types (String?, Int32?, Date?) and the workaround
-    // for this issue would be to predefine the builders for the
-    // encoder. (I have only encountered this issue when allowing
-    // nullable types at the encode func level, which is currently
-    // not allowed.)
-    mutating func encodeNil() throws {
-        try encoder.doEncodeNil(self.currentIndex)
-    }
-
-    mutating func encode(_ value: T) throws where T: Encodable {
-        let type = T.self
-        if ArrowArrayBuilders.isValidBuilderType(type) {
-            defer {increment()}
-            return try self.encoder.doEncode(value, keyIndex: self.currentIndex)
-        } else {
-            throw ArrowError.invalid("Type \(type) is currently not supported")
-        }
-    }
-
-    // nested container is currently not allowed. This method doesn't throw,
-    // so set an error message that will be thrown by the encoder at the next
-    // method call that throws
-    mutating func nestedContainer(keyedBy keyType: NestedKey.Type
-    ) -> KeyedEncodingContainer where NestedKey: CodingKey {
-        self.encoder.errorMsg = "Nested encoding is currently not supported."
-        var container = ArrowKeyedEncoding(self.encoder)
-        container.codingPath = codingPath
-        return KeyedEncodingContainer(container)
-    }
-
-    // nested container is currently not allowed. This method doesn't throw,
-    // so set an error message that will be thrown by the encoder at the next
-    // method call that throws
-    mutating func nestedUnkeyedContainer() -> UnkeyedEncodingContainer {
-        self.encoder.errorMsg = "Nested encoding is currently not supported."
-        return ArrowUnkeyedEncoding(self.encoder, codingPath: self.codingPath)
-    }
-
-    // super encoding is currently not allowed. This method doesn't throw,
-    // so set an error message that will be thrown by the encoder at the next
-    // method call that throws
-    mutating func superEncoder() -> Encoder {
-        self.encoder.errorMsg = "super encoding is currently not supported."
-        return self.encoder
-    }
-}
-
-private struct ArrowSingleValueEncoding: SingleValueEncodingContainer {
-    public private(set) var encoder: ArrowEncoder
-    var codingPath: [CodingKey] = []
-
-    public init(_ encoder: ArrowEncoder, codingPath: [CodingKey]) {
-        self.encoder = encoder
-        self.codingPath = codingPath
-    }
-
-    mutating func encodeNil() throws {
-        return try self.encoder.doEncodeNil(0)
-    }
-
-    mutating func encode(_ value: T) throws {
-        if ArrowArrayBuilders.isValidBuilderType(T.self) {
-            return try self.encoder.doEncode(value, keyIndex: 0)
-        } else {
-            throw ArrowError.invalid("Type \(T.self) is currently not supported")
-        }
-    }
-}
-// swiftlint:disable:this file_length
diff --git a/swift/Arrow/Sources/Arrow/ArrowReader.swift b/swift/Arrow/Sources/Arrow/ArrowReader.swift
deleted file mode 100644
index 8515a782afa..00000000000
--- a/swift/Arrow/Sources/Arrow/ArrowReader.swift
+++ /dev/null
@@ -1,419 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
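Paired with the `ArrowDecoder` deleted earlier in this diff, the encoder above gives a Codable round trip. A minimal sketch; note the fixed-width integer fields, since plain `Int`/`UInt` are rejected by both the encoder and the decoder:

struct Row: Codable {
    let id: Int32     // Int is rejected by design; use Int8/16/32/64
    let name: String
}

let rows = [Row(id: 1, name: "a"), Row(id: 2, name: "b")]
if let batch = try ArrowEncoder.encode(rows) {
    // decode(_:) replays every row of the RecordBatch back into structs.
    let decoded: [Row] = try ArrowDecoder(batch).decode(Row.self)
    assert(decoded.count == rows.count)
}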
- -import FlatBuffers -import Foundation - -let FILEMARKER = "ARROW1" -let CONTINUATIONMARKER = UInt32(0xFFFFFFFF) - -public class ArrowReader { // swiftlint:disable:this type_body_length - private class RecordBatchData { - let schema: org_apache_arrow_flatbuf_Schema - let recordBatch: org_apache_arrow_flatbuf_RecordBatch - private var fieldIndex: Int32 = 0 - private var nodeIndex: Int32 = 0 - private var bufferIndex: Int32 = 0 - init(_ recordBatch: org_apache_arrow_flatbuf_RecordBatch, - schema: org_apache_arrow_flatbuf_Schema) { - self.recordBatch = recordBatch - self.schema = schema - } - - func nextNode() -> org_apache_arrow_flatbuf_FieldNode? { - if nodeIndex >= self.recordBatch.nodesCount {return nil} - defer {nodeIndex += 1} - return self.recordBatch.nodes(at: nodeIndex) - } - - func nextBuffer() -> org_apache_arrow_flatbuf_Buffer? { - if bufferIndex >= self.recordBatch.buffersCount {return nil} - defer {bufferIndex += 1} - return self.recordBatch.buffers(at: bufferIndex) - } - - func nextField() -> org_apache_arrow_flatbuf_Field? { - if fieldIndex >= self.schema.fieldsCount {return nil} - defer {fieldIndex += 1} - return self.schema.fields(at: fieldIndex) - } - - func isDone() -> Bool { - return nodeIndex >= self.recordBatch.nodesCount - } - } - - private struct DataLoadInfo { - let fileData: Data - let messageOffset: Int64 - var batchData: RecordBatchData - } - - public class ArrowReaderResult { - fileprivate var messageSchema: org_apache_arrow_flatbuf_Schema? - public var schema: ArrowSchema? - public var batches = [RecordBatch]() - } - - public init() {} - - private func loadSchema(_ schema: org_apache_arrow_flatbuf_Schema) -> Result { - let builder = ArrowSchema.Builder() - for index in 0 ..< schema.fieldsCount { - let field = schema.fields(at: index)! - let fieldType = findArrowType(field) - if fieldType.info == ArrowType.ArrowUnknown { - return .failure(.unknownType("Unsupported field type found: \(field.typeType)")) - } - let arrowField = ArrowField(field.name!, type: fieldType, isNullable: field.nullable) - builder.addField(arrowField) - } - - return .success(builder.finish()) - } - - private func loadStructData(_ loadInfo: DataLoadInfo, - field: org_apache_arrow_flatbuf_Field) - -> Result { - guard let node = loadInfo.batchData.nextNode() else { - return .failure(.invalid("Node not found")) - } - - guard let nullBuffer = loadInfo.batchData.nextBuffer() else { - return .failure(.invalid("Null buffer not found")) - } - - let nullLength = UInt(ceil(Double(node.length) / 8)) - let arrowNullBuffer = makeBuffer(nullBuffer, fileData: loadInfo.fileData, - length: nullLength, messageOffset: loadInfo.messageOffset) - var children = [ArrowData]() - for index in 0.. 
Result { - guard let node = loadInfo.batchData.nextNode() else { - return .failure(.invalid("Node not found")) - } - - guard let nullBuffer = loadInfo.batchData.nextBuffer() else { - return .failure(.invalid("Null buffer not found")) - } - - guard let valueBuffer = loadInfo.batchData.nextBuffer() else { - return .failure(.invalid("Value buffer not found")) - } - - let nullLength = UInt(ceil(Double(node.length) / 8)) - let arrowNullBuffer = makeBuffer(nullBuffer, fileData: loadInfo.fileData, - length: nullLength, messageOffset: loadInfo.messageOffset) - let arrowValueBuffer = makeBuffer(valueBuffer, fileData: loadInfo.fileData, - length: UInt(node.length), messageOffset: loadInfo.messageOffset) - return makeArrayHolder(field, buffers: [arrowNullBuffer, arrowValueBuffer], - nullCount: UInt(node.nullCount), children: nil, - rbLength: UInt(loadInfo.batchData.recordBatch.length)) - } - - private func loadVariableData( - _ loadInfo: DataLoadInfo, - field: org_apache_arrow_flatbuf_Field) - -> Result { - guard let node = loadInfo.batchData.nextNode() else { - return .failure(.invalid("Node not found")) - } - - guard let nullBuffer = loadInfo.batchData.nextBuffer() else { - return .failure(.invalid("Null buffer not found")) - } - - guard let offsetBuffer = loadInfo.batchData.nextBuffer() else { - return .failure(.invalid("Offset buffer not found")) - } - - guard let valueBuffer = loadInfo.batchData.nextBuffer() else { - return .failure(.invalid("Value buffer not found")) - } - - let nullLength = UInt(ceil(Double(node.length) / 8)) - let arrowNullBuffer = makeBuffer(nullBuffer, fileData: loadInfo.fileData, - length: nullLength, messageOffset: loadInfo.messageOffset) - let arrowOffsetBuffer = makeBuffer(offsetBuffer, fileData: loadInfo.fileData, - length: UInt(node.length), messageOffset: loadInfo.messageOffset) - let arrowValueBuffer = makeBuffer(valueBuffer, fileData: loadInfo.fileData, - length: UInt(node.length), messageOffset: loadInfo.messageOffset) - return makeArrayHolder(field, buffers: [arrowNullBuffer, arrowOffsetBuffer, arrowValueBuffer], - nullCount: UInt(node.nullCount), children: nil, - rbLength: UInt(loadInfo.batchData.recordBatch.length)) - } - - private func loadField( - _ loadInfo: DataLoadInfo, - field: org_apache_arrow_flatbuf_Field) - -> Result { - if isNestedType(field.typeType) { - return loadStructData(loadInfo, field: field) - } else if isFixedPrimitive(field.typeType) { - return loadPrimitiveData(loadInfo, field: field) - } else { - return loadVariableData(loadInfo, field: field) - } - } - - private func loadRecordBatch( - _ recordBatch: org_apache_arrow_flatbuf_RecordBatch, - schema: org_apache_arrow_flatbuf_Schema, - arrowSchema: ArrowSchema, - data: Data, - messageEndOffset: Int64 - ) -> Result { - var columns: [ArrowArrayHolder] = [] - let batchData = RecordBatchData(recordBatch, schema: schema) - let loadInfo = DataLoadInfo(fileData: data, - messageOffset: messageEndOffset, - batchData: batchData) - while !batchData.isDone() { - guard let field = batchData.nextField() else { - return .failure(.invalid("Field not found")) - } - - let result = loadField(loadInfo, field: field) - switch result { - case .success(let holder): - columns.append(holder) - case .failure(let error): - return .failure(error) - } - } - - return .success(RecordBatch(arrowSchema, columns: columns)) - } - - /* - This is for reading the Arrow streaming format. The Arrow streaming format - is slightly different from the Arrow File format as it doesn't contain a header - and footer. 
- */ - public func readStreaming( // swiftlint:disable:this function_body_length - _ input: Data, - useUnalignedBuffers: Bool = false - ) -> Result { - let result = ArrowReaderResult() - var offset: Int = 0 - var length = getUInt32(input, offset: offset) - var streamData = input - var schemaMessage: org_apache_arrow_flatbuf_Schema? - while length != 0 { - if length == CONTINUATIONMARKER { - offset += Int(MemoryLayout.size) - length = getUInt32(input, offset: offset) - if length == 0 { - return .success(result) - } - } - - offset += Int(MemoryLayout.size) - streamData = input[offset...] - let dataBuffer = ByteBuffer( - data: streamData, - allowReadingUnalignedBuffers: true) - let message = org_apache_arrow_flatbuf_Message.getRootAsMessage(bb: dataBuffer) - switch message.headerType { - case .recordbatch: - do { - let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)! - let recordBatch = try loadRecordBatch( - rbMessage, - schema: schemaMessage!, - arrowSchema: result.schema!, - data: input, - messageEndOffset: (Int64(offset) + Int64(length))).get() - result.batches.append(recordBatch) - offset += Int(message.bodyLength + Int64(length)) - length = getUInt32(input, offset: offset) - } catch let error as ArrowError { - return .failure(error) - } catch { - return .failure(.unknownError("Unexpected error: \(error)")) - } - case .schema: - schemaMessage = message.header(type: org_apache_arrow_flatbuf_Schema.self)! - let schemaResult = loadSchema(schemaMessage!) - switch schemaResult { - case .success(let schema): - result.schema = schema - case .failure(let error): - return .failure(error) - } - offset += Int(message.bodyLength + Int64(length)) - length = getUInt32(input, offset: offset) - default: - return .failure(.unknownError("Unhandled header type: \(message.headerType)")) - } - } - return .success(result) - } - - /* - This is for reading the Arrow file format. The Arrow file format supports - random accessing the data. The Arrow file format contains a header and - footer around the Arrow streaming format. - */ - public func readFile( // swiftlint:disable:this function_body_length - _ fileData: Data, - useUnalignedBuffers: Bool = false - ) -> Result { - let footerLength = fileData.withUnsafeBytes { rawBuffer in - rawBuffer.loadUnaligned(fromByteOffset: fileData.count - 4, as: Int32.self) - } - - let result = ArrowReaderResult() - let footerStartOffset = fileData.count - Int(footerLength + 4) - let footerData = fileData[footerStartOffset...] - let footerBuffer = ByteBuffer( - data: footerData, - allowReadingUnalignedBuffers: useUnalignedBuffers) - let footer = org_apache_arrow_flatbuf_Footer.getRootAsFooter(bb: footerBuffer) - let schemaResult = loadSchema(footer.schema!) - switch schemaResult { - case .success(let schema): - result.schema = schema - case .failure(let error): - return .failure(error) - } - - for index in 0 ..< footer.recordBatchesCount { - let recordBatch = footer.recordBatches(at: index)! 
- var messageLength = fileData.withUnsafeBytes { rawBuffer in - rawBuffer.loadUnaligned(fromByteOffset: Int(recordBatch.offset), as: UInt32.self) - } - - var messageOffset: Int64 = 1 - if messageLength == CONTINUATIONMARKER { - messageOffset += 1 - messageLength = fileData.withUnsafeBytes { rawBuffer in - rawBuffer.loadUnaligned( - fromByteOffset: Int(recordBatch.offset + Int64(MemoryLayout.size)), - as: UInt32.self) - } - } - - let messageStartOffset = recordBatch.offset + (Int64(MemoryLayout.size) * messageOffset) - let messageEndOffset = messageStartOffset + Int64(messageLength) - let recordBatchData = fileData[messageStartOffset ..< messageEndOffset] - let mbb = ByteBuffer( - data: recordBatchData, - allowReadingUnalignedBuffers: useUnalignedBuffers) - let message = org_apache_arrow_flatbuf_Message.getRootAsMessage(bb: mbb) - switch message.headerType { - case .recordbatch: - do { - let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)! - let recordBatch = try loadRecordBatch( - rbMessage, - schema: footer.schema!, - arrowSchema: result.schema!, - data: fileData, - messageEndOffset: messageEndOffset).get() - result.batches.append(recordBatch) - } catch let error as ArrowError { - return .failure(error) - } catch { - return .failure(.unknownError("Unexpected error: \(error)")) - } - default: - return .failure(.unknownError("Unhandled header type: \(message.headerType)")) - } - } - - return .success(result) - } - - public func fromFile(_ fileURL: URL) -> Result { - do { - let fileData = try Data(contentsOf: fileURL) - if !validateFileData(fileData) { - return .failure(.ioError("Not a valid arrow file.")) - } - - let markerLength = FILEMARKER.utf8.count - let footerLengthEnd = Int(fileData.count - markerLength) - let data = fileData[..<(footerLengthEnd)] - return readFile(data) - } catch { - return .failure(.unknownError("Error loading file: \(error)")) - } - } - - static public func makeArrowReaderResult() -> ArrowReaderResult { - return ArrowReaderResult() - } - - public func fromMessage( - _ dataHeader: Data, - dataBody: Data, - result: ArrowReaderResult, - useUnalignedBuffers: Bool = false - ) -> Result { - let mbb = ByteBuffer( - data: dataHeader, - allowReadingUnalignedBuffers: useUnalignedBuffers) - let message = org_apache_arrow_flatbuf_Message.getRootAsMessage(bb: mbb) - switch message.headerType { - case .schema: - let sMessage = message.header(type: org_apache_arrow_flatbuf_Schema.self)! - switch loadSchema(sMessage) { - case .success(let schema): - result.schema = schema - result.messageSchema = sMessage - return .success(()) - case .failure(let error): - return .failure(error) - } - case .recordbatch: - let rbMessage = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)! 
-    do {
-      let recordBatch = try loadRecordBatch(
-        rbMessage, schema: result.messageSchema!, arrowSchema: result.schema!,
-        data: dataBody, messageEndOffset: 0).get()
-      result.batches.append(recordBatch)
-      return .success(())
-    } catch let error as ArrowError {
-      return .failure(error)
-    } catch {
-      return .failure(.unknownError("Unexpected error: \(error)"))
-    }
-  default:
-    return .failure(.unknownError("Unhandled header type: \(message.headerType)"))
-  }
-  }
-
-}
-// swiftlint:disable:this file_length
diff --git a/swift/Arrow/Sources/Arrow/ArrowReaderHelper.swift b/swift/Arrow/Sources/Arrow/ArrowReaderHelper.swift
deleted file mode 100644
index 18cf41ad25a..00000000000
--- a/swift/Arrow/Sources/Arrow/ArrowReaderHelper.swift
+++ /dev/null
@@ -1,298 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import FlatBuffers
-import Foundation
-
-private func makeBinaryHolder(_ buffers: [ArrowBuffer],
-                              nullCount: UInt) -> Result<ArrowArrayHolder, ArrowError> {
-  do {
-    let arrowType = ArrowType(ArrowType.ArrowBinary)
-    let arrowData = try ArrowData(arrowType, buffers: buffers, nullCount: nullCount)
-    return .success(ArrowArrayHolderImpl(try BinaryArray(arrowData)))
-  } catch let error as ArrowError {
-    return .failure(error)
-  } catch {
-    return .failure(.unknownError("\(error)"))
-  }
-}
-
-private func makeStringHolder(_ buffers: [ArrowBuffer],
-                              nullCount: UInt) -> Result<ArrowArrayHolder, ArrowError> {
-  do {
-    let arrowType = ArrowType(ArrowType.ArrowString)
-    let arrowData = try ArrowData(arrowType, buffers: buffers, nullCount: nullCount)
-    return .success(ArrowArrayHolderImpl(try StringArray(arrowData)))
-  } catch let error as ArrowError {
-    return .failure(error)
-  } catch {
-    return .failure(.unknownError("\(error)"))
-  }
-}
-
-private func makeDateHolder(_ field: ArrowField,
-                            buffers: [ArrowBuffer],
-                            nullCount: UInt
-) -> Result<ArrowArrayHolder, ArrowError> {
-  do {
-    if field.type.id == .date32 {
-      let arrowData = try ArrowData(field.type, buffers: buffers, nullCount: nullCount)
-      return .success(ArrowArrayHolderImpl(try Date32Array(arrowData)))
-    }
-
-    let arrowData = try ArrowData(field.type, buffers: buffers, nullCount: nullCount)
-    return .success(ArrowArrayHolderImpl(try Date64Array(arrowData)))
-  } catch let error as ArrowError {
-    return .failure(error)
-  } catch {
-    return .failure(.unknownError("\(error)"))
-  }
-}
-
-private func makeTimeHolder(_ field: ArrowField,
-                            buffers: [ArrowBuffer],
-                            nullCount: UInt
-) -> Result<ArrowArrayHolder, ArrowError> {
-  do {
-    if field.type.id == .time32 {
-      if let arrowType = field.type as? ArrowTypeTime32 {
-        let arrowData = try ArrowData(arrowType, buffers: buffers, nullCount: nullCount)
-        return .success(ArrowArrayHolderImpl(try FixedArray<Time32>(arrowData)))
-      } else {
-        return .failure(.invalid("Incorrect field type for time: \(field.type)"))
-      }
-    }
-
-    if let arrowType = field.type as? ArrowTypeTime64 {
-      let arrowData = try ArrowData(arrowType, buffers: buffers, nullCount: nullCount)
-      return .success(ArrowArrayHolderImpl(try FixedArray<Time64>(arrowData)))
-    } else {
-      return .failure(.invalid("Incorrect field type for time: \(field.type)"))
-    }
-  } catch let error as ArrowError {
-    return .failure(error)
-  } catch {
-    return .failure(.unknownError("\(error)"))
-  }
-}
-
-private func makeBoolHolder(_ buffers: [ArrowBuffer],
-                            nullCount: UInt) -> Result<ArrowArrayHolder, ArrowError> {
-  do {
-    let arrowType = ArrowType(ArrowType.ArrowBool)
-    let arrowData = try ArrowData(arrowType, buffers: buffers, nullCount: nullCount)
-    return .success(ArrowArrayHolderImpl(try BoolArray(arrowData)))
-  } catch let error as ArrowError {
-    return .failure(error)
-  } catch {
-    return .failure(.unknownError("\(error)"))
-  }
-}
-
-private func makeFixedHolder<T>(
-  _: T.Type, field: ArrowField, buffers: [ArrowBuffer],
-  nullCount: UInt
-) -> Result<ArrowArrayHolder, ArrowError> {
-  do {
-    let arrowData = try ArrowData(field.type, buffers: buffers, nullCount: nullCount)
-    return .success(ArrowArrayHolderImpl(try FixedArray<T>(arrowData)))
-  } catch let error as ArrowError {
-    return .failure(error)
-  } catch {
-    return .failure(.unknownError("\(error)"))
-  }
-}
-
-func makeStructHolder(
-  _ field: ArrowField,
-  buffers: [ArrowBuffer],
-  nullCount: UInt,
-  children: [ArrowData],
-  rbLength: UInt
-) -> Result<ArrowArrayHolder, ArrowError> {
-  do {
-    let arrowData = try ArrowData(field.type,
-                                  buffers: buffers, children: children,
-                                  nullCount: nullCount, length: rbLength)
-    return .success(ArrowArrayHolderImpl(try StructArray(arrowData)))
-  } catch let error as ArrowError {
-    return .failure(error)
-  } catch {
-    return .failure(.unknownError("\(error)"))
-  }
-}
-
-func makeArrayHolder(
-  _ field: org_apache_arrow_flatbuf_Field,
-  buffers: [ArrowBuffer],
-  nullCount: UInt,
-  children: [ArrowData]?,
-  rbLength: UInt
-) -> Result<ArrowArrayHolder, ArrowError> {
-  let arrowField = fromProto(field: field)
-  return makeArrayHolder(arrowField, buffers: buffers, nullCount: nullCount, children: children, rbLength: rbLength)
-}
-
-func makeArrayHolder( // swiftlint:disable:this cyclomatic_complexity
-  _ field: ArrowField,
-  buffers: [ArrowBuffer],
-  nullCount: UInt,
-  children: [ArrowData]?,
-  rbLength: UInt
-) -> Result<ArrowArrayHolder, ArrowError> {
-  let typeId = field.type.id
-  switch typeId {
-  case .int8:
-    return makeFixedHolder(Int8.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .uint8:
-    return makeFixedHolder(UInt8.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .int16:
-    return makeFixedHolder(Int16.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .uint16:
-    return makeFixedHolder(UInt16.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .int32:
-    return makeFixedHolder(Int32.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .uint32:
-    return makeFixedHolder(UInt32.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .int64:
-    return makeFixedHolder(Int64.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .uint64:
-    return makeFixedHolder(UInt64.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .boolean:
-    return makeBoolHolder(buffers, nullCount: nullCount)
-  case .float:
-    return makeFixedHolder(Float.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .double:
-    return makeFixedHolder(Double.self, field: field, buffers: buffers, nullCount: nullCount)
-  case .string:
-    return makeStringHolder(buffers, nullCount: nullCount)
-  case .binary:
-    return makeBinaryHolder(buffers, nullCount: nullCount)
-  case .date32, .date64:
-    return makeDateHolder(field, buffers: buffers, nullCount: nullCount)
-  case .time32, .time64:
-    return makeTimeHolder(field, buffers: buffers, nullCount: nullCount)
-  case .strct:
-    return makeStructHolder(field, buffers: buffers, nullCount: nullCount, children: children!, rbLength: rbLength)
-  default:
-    return .failure(.unknownType("Type \(typeId) currently not supported"))
-  }
-}
-
-func makeBuffer(_ buffer: org_apache_arrow_flatbuf_Buffer, fileData: Data,
-                length: UInt, messageOffset: Int64) -> ArrowBuffer {
-  let startOffset = messageOffset + buffer.offset
-  let endOffset = startOffset + buffer.length
-  let bufferData = [UInt8](fileData[startOffset ..< endOffset])
-  return ArrowBuffer.createBuffer(bufferData, length: length)
-}
-
-func isFixedPrimitive(_ type: org_apache_arrow_flatbuf_Type_) -> Bool {
-  switch type {
-  case .int, .bool, .floatingpoint, .date, .time:
-    return true
-  default:
-    return false
-  }
-}
-
-func isNestedType(_ type: org_apache_arrow_flatbuf_Type_) -> Bool {
-  switch type {
-  case .struct_:
-    return true
-  default:
-    return false
-  }
-}
-
-func findArrowType( // swiftlint:disable:this cyclomatic_complexity function_body_length
-  _ field: org_apache_arrow_flatbuf_Field) -> ArrowType {
-  let type = field.typeType
-  switch type {
-  case .int:
-    let intType = field.type(type: org_apache_arrow_flatbuf_Int.self)!
-    let bitWidth = intType.bitWidth
-    if bitWidth == 8 { return ArrowType(intType.isSigned ? ArrowType.ArrowInt8 : ArrowType.ArrowUInt8) }
-    if bitWidth == 16 { return ArrowType(intType.isSigned ? ArrowType.ArrowInt16 : ArrowType.ArrowUInt16) }
-    if bitWidth == 32 { return ArrowType(intType.isSigned ? ArrowType.ArrowInt32 : ArrowType.ArrowUInt32) }
-    if bitWidth == 64 { return ArrowType(intType.isSigned ? ArrowType.ArrowInt64 : ArrowType.ArrowUInt64) }
-    return ArrowType(ArrowType.ArrowUnknown)
-  case .bool:
-    return ArrowType(ArrowType.ArrowBool)
-  case .floatingpoint:
-    let floatType = field.type(type: org_apache_arrow_flatbuf_FloatingPoint.self)!
-    switch floatType.precision {
-    case .single:
-      return ArrowType(ArrowType.ArrowFloat)
-    case .double:
-      return ArrowType(ArrowType.ArrowDouble)
-    default:
-      return ArrowType(ArrowType.ArrowUnknown)
-    }
-  case .utf8:
-    return ArrowType(ArrowType.ArrowString)
-  case .binary:
-    return ArrowType(ArrowType.ArrowBinary)
-  case .date:
-    let dateType = field.type(type: org_apache_arrow_flatbuf_Date.self)!
-    if dateType.unit == .day {
-      return ArrowType(ArrowType.ArrowDate32)
-    }
-
-    return ArrowType(ArrowType.ArrowDate64)
-  case .time:
-    let timeType = field.type(type: org_apache_arrow_flatbuf_Time.self)!
-    if timeType.unit == .second || timeType.unit == .millisecond {
-      return ArrowTypeTime32(timeType.unit == .second ? .seconds : .milliseconds)
-    }
-
-    return ArrowTypeTime64(timeType.unit == .microsecond ? .microseconds : .nanoseconds)
-  case .struct_:
-    _ = field.type(type: org_apache_arrow_flatbuf_Struct_.self)!
-    var fields = [ArrowField]()
-    for index in 0..<field.childrenCount {
-      let childField = field.children(at: index)!
-      fields.append(fromProto(field: childField))
-    }
-    return ArrowNestedType(ArrowType.ArrowStruct, fields: fields)
-  default:
-    return ArrowType(ArrowType.ArrowUnknown)
-  }
-}
-
-func validateBufferIndex(_ recordBatch: org_apache_arrow_flatbuf_RecordBatch, index: Int32) throws {
-  if index >= recordBatch.buffersCount {
-    throw ArrowError.outOfBounds(index: Int64(index))
-  }
-}
-
-func validateFileData(_ data: Data) -> Bool {
-  let markerLength = FILEMARKER.utf8.count
-  let startString = String(decoding: data[..<markerLength], as: UTF8.self)
-  let endString = String(decoding: data[(data.count - markerLength)...], as: UTF8.self)
-  return startString == FILEMARKER && endString == FILEMARKER
-}
-
-func getUInt32(_ data: Data, offset: Int) ->
UInt32 { - let token = data.withUnsafeBytes { rawBuffer in - rawBuffer.loadUnaligned(fromByteOffset: offset, as: UInt32.self) - } - return token -} diff --git a/swift/Arrow/Sources/Arrow/ArrowSchema.swift b/swift/Arrow/Sources/Arrow/ArrowSchema.swift deleted file mode 100644 index 65c506d51cd..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowSchema.swift +++ /dev/null @@ -1,73 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -public class ArrowField { - public let type: ArrowType - public let name: String - public let isNullable: Bool - - init(_ name: String, type: ArrowType, isNullable: Bool) { - self.name = name - self.type = type - self.isNullable = isNullable - } -} - -public class ArrowSchema { - public let fields: [ArrowField] - public let fieldLookup: [String: Int] - init(_ fields: [ArrowField]) { - var fieldLookup = [String: Int]() - for (index, field) in fields.enumerated() { - fieldLookup[field.name] = index - } - - self.fields = fields - self.fieldLookup = fieldLookup - } - - public func field(_ index: Int) -> ArrowField { - return self.fields[index] - } - - public func fieldIndex(_ name: String) -> Int? { - return self.fieldLookup[name] - } - - public class Builder { - private var fields: [ArrowField] = [] - - public init() {} - - @discardableResult - public func addField(_ field: ArrowField) -> Builder { - fields.append(field) - return self - } - - @discardableResult - public func addField(_ name: String, type: ArrowType, isNullable: Bool) -> Builder { - fields.append(ArrowField(name, type: type, isNullable: isNullable)) - return self - } - - public func finish() -> ArrowSchema { - return ArrowSchema(fields) - } - } -} diff --git a/swift/Arrow/Sources/Arrow/ArrowTable.swift b/swift/Arrow/Sources/Arrow/ArrowTable.swift deleted file mode 100644 index dedf90f791c..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowTable.swift +++ /dev/null @@ -1,202 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
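Reviewer note: the hunk above removes ArrowSchema.swift wholesale. For orientation, here is a minimal sketch (not part of the patch) of how the deleted ArrowSchema.Builder was typically driven; the field names are hypothetical.

```swift
// Hypothetical usage of the deleted ArrowSchema.Builder: addField calls
// chain via @discardableResult, and finish() freezes the field list plus
// the name -> index lookup built in ArrowSchema.init.
let schema = ArrowSchema.Builder()
    .addField("id", type: ArrowType(ArrowType.ArrowInt32), isNullable: false)
    .addField("label", type: ArrowType(ArrowType.ArrowString), isNullable: true)
    .finish()
assert(schema.fieldIndex("label") == 1) // resolved through fieldLookup
assert(schema.field(0).name == "id")
```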
-
-import Foundation
-
-public class ArrowColumn {
-  public let field: ArrowField
-  fileprivate let dataHolder: ChunkedArrayHolder
-  public var type: ArrowType {return self.dataHolder.type}
-  public var length: UInt {return self.dataHolder.length}
-  public var nullCount: UInt {return self.dataHolder.nullCount}
-
-  public func data<T>() -> ChunkedArray<T> {
-    return (self.dataHolder.holder as! ChunkedArray<T>) // swiftlint:disable:this force_cast
-  }
-
-  public var name: String {return field.name}
-  public init(_ field: ArrowField, chunked: ChunkedArrayHolder) {
-    self.field = field
-    self.dataHolder = chunked
-  }
-}
-
-public class ArrowTable {
-  public let schema: ArrowSchema
-  public var columnCount: UInt {return UInt(self.columns.count)}
-  public let rowCount: UInt
-  public let columns: [ArrowColumn]
-  init(_ schema: ArrowSchema, columns: [ArrowColumn]) {
-    self.schema = schema
-    self.columns = columns
-    self.rowCount = columns[0].length
-  }
-
-  public static func from(recordBatches: [RecordBatch]) -> Result<ArrowTable, ArrowError> {
-    if recordBatches.isEmpty {
-      return .failure(.arrayHasNoElements)
-    }
-
-    var holders = [[ArrowArrayHolder]]()
-    let schema = recordBatches[0].schema
-    for recordBatch in recordBatches {
-      for index in 0..<schema.fields.count {
-        if holders.count <= index {
-          holders.append([ArrowArrayHolder]())
-        }
-        holders[index].append(recordBatch.columns[index])
-      }
-    }
-
-    let tableBuilder = ArrowTable.Builder()
-    for index in 0..<schema.fields.count {
-      switch makeArrowColumn(schema.fields[index], holders: holders[index]) {
-      case .success(let column):
-        tableBuilder.addColumn(column)
-      case .failure(let error):
-        return .failure(error)
-      }
-    }
-
-    return .success(tableBuilder.finish())
-  }
-
-  private static func makeArrowColumn(_ field: ArrowField,
-                                      holders: [ArrowArrayHolder]) -> Result<ArrowColumn, ArrowError> {
-    do {
-      return .success(try holders[0].getArrowColumn(field, holders))
-    } catch {
-      return .failure(.runtimeError("\(error)"))
-    }
-  }
-
-  public class Builder {
-    let schemaBuilder = ArrowSchema.Builder()
-    var columns = [ArrowColumn]()
-
-    public init() {}
-
-    @discardableResult
-    public func addColumn<T>(_ fieldName: String, arrowArray: ArrowArray<T>) throws -> Builder {
-      return self.addColumn(fieldName, chunked: try ChunkedArray([arrowArray]))
-    }
-
-    @discardableResult
-    public func addColumn<T>(_ fieldName: String, chunked: ChunkedArray<T>) -> Builder {
-      let field = ArrowField(fieldName, type: chunked.type, isNullable: chunked.nullCount != 0)
-      self.schemaBuilder.addField(field)
-      self.columns.append(ArrowColumn(field, chunked: ChunkedArrayHolder(chunked)))
-      return self
-    }
-
-    @discardableResult
-    public func addColumn<T>(_ field: ArrowField, arrowArray: ArrowArray<T>) throws -> Builder {
-      self.schemaBuilder.addField(field)
-      let holder = ChunkedArrayHolder(try ChunkedArray([arrowArray]))
-      self.columns.append(ArrowColumn(field, chunked: holder))
-      return self
-    }
-
-    @discardableResult
-    public func addColumn<T>(_ field: ArrowField, chunked: ChunkedArray<T>) -> Builder {
-      self.schemaBuilder.addField(field)
-      self.columns.append(ArrowColumn(field, chunked: ChunkedArrayHolder(chunked)))
-      return self
-    }
-
-    @discardableResult
-    public func addColumn(_ column: ArrowColumn) -> Builder {
-      self.schemaBuilder.addField(column.field)
-      self.columns.append(column)
-      return self
-    }
-
-    public func finish() -> ArrowTable {
-      return ArrowTable(self.schemaBuilder.finish(), columns: self.columns)
-    }
-  }
-}
-
-public class RecordBatch {
-  public let schema: ArrowSchema
-  public var columnCount: UInt {return UInt(self.columns.count)}
-  public let columns: [ArrowArrayHolder]
-  public let length: UInt
-  public init(_ schema: ArrowSchema, columns: [ArrowArrayHolder]) {
-    self.schema = schema
-    self.columns = columns
-    self.length = columns[0].length
-  }
-
-  public class Builder {
-    let schemaBuilder = ArrowSchema.Builder()
-    var columns = [ArrowArrayHolder]()
-
-    public init() {}
-
-    @discardableResult
-    public func addColumn(_ fieldName: String, arrowArray: ArrowArrayHolder) -> Builder {
-      let field = ArrowField(fieldName, type: arrowArray.type, isNullable: arrowArray.nullCount != 0)
self.schemaBuilder.addField(field) - self.columns.append(arrowArray) - return self - } - - @discardableResult - public func addColumn(_ field: ArrowField, arrowArray: ArrowArrayHolder) -> Builder { - self.schemaBuilder.addField(field) - self.columns.append(arrowArray) - return self - } - - public func finish() -> Result { - if columns.count > 0 { - let columnLength = columns[0].length - for column in columns { - if column.length != columnLength { // swiftlint:disable:this for_where - return .failure(.runtimeError("Columns have different sizes")) - } - } - } - return .success(RecordBatch(self.schemaBuilder.finish(), columns: self.columns)) - } - } - - public func data(for columnIndex: Int) -> ArrowArray { - let arrayHolder = column(columnIndex) - return (arrayHolder.array as! ArrowArray) // swiftlint:disable:this force_cast - } - - public func anyData(for columnIndex: Int) -> AnyArray { - let arrayHolder = column(columnIndex) - return arrayHolder.array - } - - public func column(_ index: Int) -> ArrowArrayHolder { - return self.columns[index] - } - - public func column(_ name: String) -> ArrowArrayHolder? { - if let index = self.schema.fieldIndex(name) { - return self.columns[index] - } - - return nil - } -} diff --git a/swift/Arrow/Sources/Arrow/ArrowType.swift b/swift/Arrow/Sources/Arrow/ArrowType.swift deleted file mode 100644 index b44f8591859..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowType.swift +++ /dev/null @@ -1,405 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
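For context on the ArrowTable.swift hunk above: RecordBatch.Builder.finish() returns a Result so that column-length mismatches surface as errors rather than traps. A hedged sketch, assuming `idHolder` and `nameHolder` are ArrowArrayHolder values built elsewhere:

```swift
// Sketch only: idHolder and nameHolder stand in for holders built elsewhere.
let result = RecordBatch.Builder()
    .addColumn("id", arrowArray: idHolder)
    .addColumn("label", arrowArray: nameHolder)
    .finish()
switch result {
case .success(let batch):
    print("rows: \(batch.length), columns: \(batch.columnCount)")
case .failure(let error):
    print("invalid batch: \(error)") // e.g. "Columns have different sizes"
}
```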
- -import Foundation - -public typealias Time32 = Int32 -public typealias Time64 = Int64 -public typealias Date32 = Int32 -public typealias Date64 = Int64 - -func FlatBuffersVersion_23_1_4() { // swiftlint:disable:this identifier_name -} - -public enum ArrowError: Error { - case none - case unknownType(String) - case runtimeError(String) - case outOfBounds(index: Int64) - case arrayHasNoElements - case unknownError(String) - case notImplemented - case ioError(String) - case invalid(String) -} - -public enum ArrowTypeId { - case binary - case boolean - case date32 - case date64 - case dateType - case decimal128 - case decimal256 - case dictionary - case double - case fixedSizeBinary - case fixedWidthType - case float - // case HalfFloatType - case int16 - case int32 - case int64 - case int8 - case integer - case intervalUnit - case list - case nested - case null - case number - case string - case strct - case time32 - case time64 - case time - case uint16 - case uint32 - case uint64 - case uint8 - case union - case unknown -} - -public enum ArrowTime32Unit { - case seconds - case milliseconds -} - -public enum ArrowTime64Unit { - case microseconds - case nanoseconds -} - -public class ArrowTypeTime32: ArrowType { - let unit: ArrowTime32Unit - public init(_ unit: ArrowTime32Unit) { - self.unit = unit - super.init(ArrowType.ArrowTime32) - } - - public override var cDataFormatId: String { - get throws { - switch self.unit { - case .milliseconds: - return "ttm" - case .seconds: - return "tts" - } - } - } -} - -public class ArrowTypeTime64: ArrowType { - let unit: ArrowTime64Unit - public init(_ unit: ArrowTime64Unit) { - self.unit = unit - super.init(ArrowType.ArrowTime64) - } - - public override var cDataFormatId: String { - get throws { - switch self.unit { - case .microseconds: - return "ttu" - case .nanoseconds: - return "ttn" - } - } - } -} - -public class ArrowNestedType: ArrowType { - let fields: [ArrowField] - public init(_ info: ArrowType.Info, fields: [ArrowField]) { - self.fields = fields - super.init(info) - } -} - -public class ArrowType { - public private(set) var info: ArrowType.Info - public static let ArrowInt8 = Info.primitiveInfo(ArrowTypeId.int8) - public static let ArrowInt16 = Info.primitiveInfo(ArrowTypeId.int16) - public static let ArrowInt32 = Info.primitiveInfo(ArrowTypeId.int32) - public static let ArrowInt64 = Info.primitiveInfo(ArrowTypeId.int64) - public static let ArrowUInt8 = Info.primitiveInfo(ArrowTypeId.uint8) - public static let ArrowUInt16 = Info.primitiveInfo(ArrowTypeId.uint16) - public static let ArrowUInt32 = Info.primitiveInfo(ArrowTypeId.uint32) - public static let ArrowUInt64 = Info.primitiveInfo(ArrowTypeId.uint64) - public static let ArrowFloat = Info.primitiveInfo(ArrowTypeId.float) - public static let ArrowDouble = Info.primitiveInfo(ArrowTypeId.double) - public static let ArrowUnknown = Info.primitiveInfo(ArrowTypeId.unknown) - public static let ArrowString = Info.variableInfo(ArrowTypeId.string) - public static let ArrowBool = Info.primitiveInfo(ArrowTypeId.boolean) - public static let ArrowDate32 = Info.primitiveInfo(ArrowTypeId.date32) - public static let ArrowDate64 = Info.primitiveInfo(ArrowTypeId.date64) - public static let ArrowBinary = Info.variableInfo(ArrowTypeId.binary) - public static let ArrowTime32 = Info.timeInfo(ArrowTypeId.time32) - public static let ArrowTime64 = Info.timeInfo(ArrowTypeId.time64) - public static let ArrowStruct = Info.complexInfo(ArrowTypeId.strct) - - public init(_ info: ArrowType.Info) { - self.info = info - } 
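The time classes above carry their unit into the C Data Interface format string (cDataFormatId, defined further down in this hunk). A small sketch of the mapping, using only values visible in the deleted source:

```swift
// Format strings produced by the deleted cDataFormatId accessors.
let millis = ArrowTypeTime32(.milliseconds)
let nanos = ArrowTypeTime64(.nanoseconds)
assert(try! millis.cDataFormatId == "ttm")
assert(try! nanos.cDataFormatId == "ttn")
assert(try! ArrowType(ArrowType.ArrowInt32).cDataFormatId == "i")
```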
-
-  public var id: ArrowTypeId {
-    switch self.info {
-    case .primitiveInfo(let id):
-      return id
-    case .timeInfo(let id):
-      return id
-    case .variableInfo(let id):
-      return id
-    case .complexInfo(let id):
-      return id
-    }
-  }
-
-  public enum Info {
-    case primitiveInfo(ArrowTypeId)
-    case variableInfo(ArrowTypeId)
-    case timeInfo(ArrowTypeId)
-    case complexInfo(ArrowTypeId)
-  }
-
-  public static func infoForType( // swiftlint:disable:this cyclomatic_complexity
-    _ type: Any.Type) -> ArrowType.Info {
-    if type == String.self {
-      return ArrowType.ArrowString
-    } else if type == Date.self {
-      return ArrowType.ArrowDate64
-    } else if type == Bool.self {
-      return ArrowType.ArrowBool
-    } else if type == Data.self {
-      return ArrowType.ArrowBinary
-    } else if type == Int8.self {
-      return ArrowType.ArrowInt8
-    } else if type == Int16.self {
-      return ArrowType.ArrowInt16
-    } else if type == Int32.self {
-      return ArrowType.ArrowInt32
-    } else if type == Int64.self {
-      return ArrowType.ArrowInt64
-    } else if type == UInt8.self {
-      return ArrowType.ArrowUInt8
-    } else if type == UInt16.self {
-      return ArrowType.ArrowUInt16
-    } else if type == UInt32.self {
-      return ArrowType.ArrowUInt32
-    } else if type == UInt64.self {
-      return ArrowType.ArrowUInt64
-    } else if type == Float.self {
-      return ArrowType.ArrowFloat
-    } else if type == Double.self {
-      return ArrowType.ArrowDouble
-    } else {
-      return ArrowType.ArrowUnknown
-    }
-  }
-
-  public static func infoForNumericType<T>(_ type: T.Type) -> ArrowType.Info {
-    if type == Int8.self {
-      return ArrowType.ArrowInt8
-    } else if type == Int16.self {
-      return ArrowType.ArrowInt16
-    } else if type == Int32.self {
-      return ArrowType.ArrowInt32
-    } else if type == Int64.self {
-      return ArrowType.ArrowInt64
-    } else if type == UInt8.self {
-      return ArrowType.ArrowUInt8
-    } else if type == UInt16.self {
-      return ArrowType.ArrowUInt16
-    } else if type == UInt32.self {
-      return ArrowType.ArrowUInt32
-    } else if type == UInt64.self {
-      return ArrowType.ArrowUInt64
-    } else if type == Float.self {
-      return ArrowType.ArrowFloat
-    } else if type == Double.self {
-      return ArrowType.ArrowDouble
-    } else {
-      return ArrowType.ArrowUnknown
-    }
-  }
-
-  public func getStride( // swiftlint:disable:this cyclomatic_complexity
-  ) -> Int {
-    switch self.id {
-    case .int8:
-      return MemoryLayout<Int8>.stride
-    case .int16:
-      return MemoryLayout<Int16>.stride
-    case .int32:
-      return MemoryLayout<Int32>.stride
-    case .int64:
-      return MemoryLayout<Int64>.stride
-    case .uint8:
-      return MemoryLayout<UInt8>.stride
-    case .uint16:
-      return MemoryLayout<UInt16>.stride
-    case .uint32:
-      return MemoryLayout<UInt32>.stride
-    case .uint64:
-      return MemoryLayout<UInt64>.stride
-    case .float:
-      return MemoryLayout<Float>.stride
-    case .double:
-      return MemoryLayout<Double>.stride
-    case .boolean:
-      return MemoryLayout<Bool>.stride
-    case .date32:
-      return MemoryLayout<Date32>.stride
-    case .date64:
-      return MemoryLayout<Date64>.stride
-    case .time32:
-      return MemoryLayout<Time32>.stride
-    case .time64:
-      return MemoryLayout<Time64>.stride
-    case .binary:
-      return MemoryLayout<Int8>.stride
-    case .string:
-      return MemoryLayout<Int8>.stride
-    case .strct:
-      return 0
-    default:
-      fatalError("Stride requested for unknown type: \(self)")
-    }
-  }
-
-  public var cDataFormatId: String {
-    get throws {
-      switch self.id {
-      case ArrowTypeId.int8:
-        return "c"
-      case ArrowTypeId.int16:
-        return "s"
-      case ArrowTypeId.int32:
-        return "i"
-      case ArrowTypeId.int64:
-        return "l"
-      case ArrowTypeId.uint8:
-        return "C"
-      case ArrowTypeId.uint16:
-        return "S"
-      case ArrowTypeId.uint32:
-        return "I"
-      case ArrowTypeId.uint64:
-        return "L"
case ArrowTypeId.float: - return "f" - case ArrowTypeId.double: - return "g" - case ArrowTypeId.boolean: - return "b" - case ArrowTypeId.date32: - return "tdD" - case ArrowTypeId.date64: - return "tdm" - case ArrowTypeId.time32: - if let time32 = self as? ArrowTypeTime32 { - return try time32.cDataFormatId - } - return "tts" - case ArrowTypeId.time64: - if let time64 = self as? ArrowTypeTime64 { - return try time64.cDataFormatId - } - return "ttu" - case ArrowTypeId.binary: - return "z" - case ArrowTypeId.string: - return "u" - default: - throw ArrowError.notImplemented - } - } - } - - public static func fromCDataFormatId( // swiftlint:disable:this cyclomatic_complexity - _ from: String) throws -> ArrowType { - if from == "c" { - return ArrowType(ArrowType.ArrowInt8) - } else if from == "s" { - return ArrowType(ArrowType.ArrowInt16) - } else if from == "i" { - return ArrowType(ArrowType.ArrowInt32) - } else if from == "l" { - return ArrowType(ArrowType.ArrowInt64) - } else if from == "C" { - return ArrowType(ArrowType.ArrowUInt8) - } else if from == "S" { - return ArrowType(ArrowType.ArrowUInt16) - } else if from == "I" { - return ArrowType(ArrowType.ArrowUInt32) - } else if from == "L" { - return ArrowType(ArrowType.ArrowUInt64) - } else if from == "f" { - return ArrowType(ArrowType.ArrowFloat) - } else if from == "g" { - return ArrowType(ArrowType.ArrowDouble) - } else if from == "b" { - return ArrowType(ArrowType.ArrowBool) - } else if from == "tdD" { - return ArrowType(ArrowType.ArrowDate32) - } else if from == "tdm" { - return ArrowType(ArrowType.ArrowDate64) - } else if from == "tts" { - return ArrowTypeTime32(.seconds) - } else if from == "ttm" { - return ArrowTypeTime32(.milliseconds) - } else if from == "ttu" { - return ArrowTypeTime64(.microseconds) - } else if from == "ttn" { - return ArrowTypeTime64(.nanoseconds) - } else if from == "z" { - return ArrowType(ArrowType.ArrowBinary) - } else if from == "u" { - return ArrowType(ArrowType.ArrowString) - } - - throw ArrowError.notImplemented - } -} - -extension ArrowType.Info: Equatable { - public static func == (lhs: ArrowType.Info, rhs: ArrowType.Info) -> Bool { - switch(lhs, rhs) { - case (.primitiveInfo(let lhsId), .primitiveInfo(let rhsId)): - return lhsId == rhsId - case (.variableInfo(let lhsId), .variableInfo(let rhsId)): - return lhsId == rhsId - case (.timeInfo(let lhsId), .timeInfo(let rhsId)): - return lhsId == rhsId - case (.complexInfo(let lhsId), .complexInfo(let rhsId)): - return lhsId == rhsId - default: - return false - } - } -} - -func getBytesFor(_ data: T) -> Data? { - if let temp = data as? String { - return temp.data(using: .utf8) - } else if T.self == Data.self { - return data as? Data - } else { - return nil - } -} -// swiftlint:disable:this file_length diff --git a/swift/Arrow/Sources/Arrow/ArrowWriter.swift b/swift/Arrow/Sources/Arrow/ArrowWriter.swift deleted file mode 100644 index 3aa25b62b49..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowWriter.swift +++ /dev/null @@ -1,434 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -import FlatBuffers - -public protocol DataWriter { - var count: Int {get} - func append(_ data: Data) -} - -public class ArrowWriter { // swiftlint:disable:this type_body_length - public class InMemDataWriter: DataWriter { - public private(set) var data: Data - public var count: Int { return data.count } - public init(_ data: Data) { - self.data = data - } - convenience init() { - self.init(Data()) - } - - public func append(_ data: Data) { - self.data.append(data) - } - } - - public class FileDataWriter: DataWriter { - private var handle: FileHandle - private var currentSize: Int = 0 - public var count: Int { return currentSize } - public init(_ handle: FileHandle) { - self.handle = handle - } - - public func append(_ data: Data) { - self.handle.write(data) - self.currentSize += data.count - } - } - - public class Info { - public let type: org_apache_arrow_flatbuf_MessageHeader - public let schema: ArrowSchema - public let batches: [RecordBatch] - public init(_ type: org_apache_arrow_flatbuf_MessageHeader, schema: ArrowSchema, batches: [RecordBatch]) { - self.type = type - self.schema = schema - self.batches = batches - } - - public convenience init(_ type: org_apache_arrow_flatbuf_MessageHeader, schema: ArrowSchema) { - self.init(type, schema: schema, batches: [RecordBatch]()) - } - } - - public init() {} - - private func writeField(_ fbb: inout FlatBufferBuilder, field: ArrowField) -> Result { - var fieldsOffset: Offset? - if let nestedField = field.type as? 
ArrowNestedType { - var offsets = [Offset]() - for field in nestedField.fields { - switch writeField(&fbb, field: field) { - case .success(let offset): - offsets.append(offset) - case .failure(let error): - return .failure(error) - } - } - - fieldsOffset = fbb.createVector(ofOffsets: offsets) - } - - let nameOffset = fbb.create(string: field.name) - let fieldTypeOffsetResult = toFBType(&fbb, arrowType: field.type) - let startOffset = org_apache_arrow_flatbuf_Field.startField(&fbb) - org_apache_arrow_flatbuf_Field.add(name: nameOffset, &fbb) - org_apache_arrow_flatbuf_Field.add(nullable: field.isNullable, &fbb) - if let childrenOffset = fieldsOffset { - org_apache_arrow_flatbuf_Field.addVectorOf(children: childrenOffset, &fbb) - } - - switch toFBTypeEnum(field.type) { - case .success(let type): - org_apache_arrow_flatbuf_Field.add(typeType: type, &fbb) - case .failure(let error): - return .failure(error) - } - - switch fieldTypeOffsetResult { - case .success(let offset): - org_apache_arrow_flatbuf_Field.add(type: offset, &fbb) - return .success(org_apache_arrow_flatbuf_Field.endField(&fbb, start: startOffset)) - case .failure(let error): - return .failure(error) - } - } - - private func writeSchema(_ fbb: inout FlatBufferBuilder, schema: ArrowSchema) -> Result { - var fieldOffsets = [Offset]() - for field in schema.fields { - switch writeField(&fbb, field: field) { - case .success(let offset): - fieldOffsets.append(offset) - case .failure(let error): - return .failure(error) - } - } - - let fieldsOffset: Offset = fbb.createVector(ofOffsets: fieldOffsets) - let schemaOffset = - org_apache_arrow_flatbuf_Schema.createSchema(&fbb, - endianness: .little, - fieldsVectorOffset: fieldsOffset) - return .success(schemaOffset) - - } - - private func writeRecordBatches( - _ writer: inout DataWriter, - batches: [RecordBatch] - ) -> Result<[org_apache_arrow_flatbuf_Block], ArrowError> { - var rbBlocks = [org_apache_arrow_flatbuf_Block]() - - for batch in batches { - let startIndex = writer.count - switch writeRecordBatch(batch: batch) { - case .success(let rbResult): - withUnsafeBytes(of: CONTINUATIONMARKER.littleEndian) {writer.append(Data($0))} - withUnsafeBytes(of: rbResult.1.o.littleEndian) {writer.append(Data($0))} - writer.append(rbResult.0) - switch writeRecordBatchData(&writer, fields: batch.schema.fields, columns: batch.columns) { - case .success: - rbBlocks.append( - org_apache_arrow_flatbuf_Block(offset: Int64(startIndex), - metaDataLength: Int32(0), - bodyLength: Int64(rbResult.1.o))) - case .failure(let error): - return .failure(error) - } - case .failure(let error): - return .failure(error) - } - } - - return .success(rbBlocks) - } - - private func writeFieldNodes(_ fields: [ArrowField], columns: [ArrowArrayHolder], offsets: inout [Offset], - fbb: inout FlatBufferBuilder) { - for index in (0 ..< fields.count).reversed() { - let column = columns[index] - let fieldNode = - org_apache_arrow_flatbuf_FieldNode(length: Int64(column.length), - nullCount: Int64(column.nullCount)) - offsets.append(fbb.create(struct: fieldNode)) - if let nestedType = column.type as? ArrowNestedType { - let structArray = column.array as? 
StructArray - writeFieldNodes(nestedType.fields, columns: structArray!.arrowFields!, offsets: &offsets, fbb: &fbb) - } - } - } - - private func writeBufferInfo(_ fields: [ArrowField], - columns: [ArrowArrayHolder], - bufferOffset: inout Int, - buffers: inout [org_apache_arrow_flatbuf_Buffer], - fbb: inout FlatBufferBuilder) { - for index in 0 ..< fields.count { - let column = columns[index] - let colBufferDataSizes = column.getBufferDataSizes() - for var bufferDataSize in colBufferDataSizes { - bufferDataSize = getPadForAlignment(bufferDataSize) - let buffer = org_apache_arrow_flatbuf_Buffer(offset: Int64(bufferOffset), length: Int64(bufferDataSize)) - buffers.append(buffer) - bufferOffset += bufferDataSize - if let nestedType = column.type as? ArrowNestedType { - let structArray = column.array as? StructArray - writeBufferInfo(nestedType.fields, columns: structArray!.arrowFields!, - bufferOffset: &bufferOffset, buffers: &buffers, fbb: &fbb) - } - } - } - } - - private func writeRecordBatch(batch: RecordBatch) -> Result<(Data, Offset), ArrowError> { - let schema = batch.schema - var fbb = FlatBufferBuilder() - - // write out field nodes - var fieldNodeOffsets = [Offset]() - fbb.startVector(schema.fields.count, elementSize: MemoryLayout.size) - writeFieldNodes(schema.fields, columns: batch.columns, offsets: &fieldNodeOffsets, fbb: &fbb) - let nodeOffset = fbb.endVector(len: fieldNodeOffsets.count) - - // write out buffers - var buffers = [org_apache_arrow_flatbuf_Buffer]() - var bufferOffset = Int(0) - writeBufferInfo(schema.fields, columns: batch.columns, - bufferOffset: &bufferOffset, buffers: &buffers, - fbb: &fbb) - org_apache_arrow_flatbuf_RecordBatch.startVectorOfBuffers(batch.schema.fields.count, in: &fbb) - for buffer in buffers.reversed() { - fbb.create(struct: buffer) - } - - let batchBuffersOffset = fbb.endVector(len: buffers.count) - let startRb = org_apache_arrow_flatbuf_RecordBatch.startRecordBatch(&fbb) - org_apache_arrow_flatbuf_RecordBatch.addVectorOf(nodes: nodeOffset, &fbb) - org_apache_arrow_flatbuf_RecordBatch.addVectorOf(buffers: batchBuffersOffset, &fbb) - org_apache_arrow_flatbuf_RecordBatch.add(length: Int64(batch.length), &fbb) - let recordBatchOffset = org_apache_arrow_flatbuf_RecordBatch.endRecordBatch(&fbb, start: startRb) - let bodySize = Int64(bufferOffset) - let startMessage = org_apache_arrow_flatbuf_Message.startMessage(&fbb) - org_apache_arrow_flatbuf_Message.add(version: .max, &fbb) - org_apache_arrow_flatbuf_Message.add(bodyLength: Int64(bodySize), &fbb) - org_apache_arrow_flatbuf_Message.add(headerType: .recordbatch, &fbb) - org_apache_arrow_flatbuf_Message.add(header: recordBatchOffset, &fbb) - let messageOffset = org_apache_arrow_flatbuf_Message.endMessage(&fbb, start: startMessage) - fbb.finish(offset: messageOffset) - return .success((fbb.data, Offset(offset: UInt32(fbb.data.count)))) - } - - private func writeRecordBatchData( - _ writer: inout DataWriter, fields: [ArrowField], - columns: [ArrowArrayHolder]) - -> Result { - for index in 0 ..< fields.count { - let column = columns[index] - let colBufferData = column.getBufferData() - for var bufferData in colBufferData { - addPadForAlignment(&bufferData) - writer.append(bufferData) - if let nestedType = column.type as? ArrowNestedType { - guard let structArray = column.array as? StructArray else { - return .failure(.invalid("Struct type array expected for nested type")) - } - - switch writeRecordBatchData(&writer, fields: nestedType.fields, columns: structArray.arrowFields!) 
{ - case .success: - continue - case .failure(let error): - return .failure(error) - } - } - } - } - - return .success(true) - } - - private func writeFooter(schema: ArrowSchema, - rbBlocks: [org_apache_arrow_flatbuf_Block] - ) -> Result { - var fbb: FlatBufferBuilder = FlatBufferBuilder() - switch writeSchema(&fbb, schema: schema) { - case .success(let schemaOffset): - fbb.startVector(rbBlocks.count, elementSize: MemoryLayout.size) - for blkInfo in rbBlocks.reversed() { - fbb.create(struct: blkInfo) - } - - let rbBlkEnd = fbb.endVector(len: rbBlocks.count) - let footerStartOffset = org_apache_arrow_flatbuf_Footer.startFooter(&fbb) - org_apache_arrow_flatbuf_Footer.add(schema: schemaOffset, &fbb) - org_apache_arrow_flatbuf_Footer.addVectorOf(recordBatches: rbBlkEnd, &fbb) - let footerOffset = org_apache_arrow_flatbuf_Footer.endFooter(&fbb, start: footerStartOffset) - fbb.finish(offset: footerOffset) - return .success(fbb.data) - case .failure(let error): - return .failure(error) - } - } - - private func writeFile(_ writer: inout DataWriter, info: ArrowWriter.Info) -> Result { - var fbb: FlatBufferBuilder = FlatBufferBuilder() - switch writeSchema(&fbb, schema: info.schema) { - case .success(let schemaOffset): - fbb.finish(offset: schemaOffset) - writer.append(fbb.data) - case .failure(let error): - return .failure(error) - } - - switch writeRecordBatches(&writer, batches: info.batches) { - case .success(let rbBlocks): - switch writeFooter(schema: info.schema, rbBlocks: rbBlocks) { - case .success(let footerData): - fbb.finish(offset: Offset(offset: fbb.buffer.size)) - let footerOffset = writer.count - writer.append(footerData) - addPadForAlignment(&writer) - - withUnsafeBytes(of: Int32(0).littleEndian) { writer.append(Data($0)) } - let footerDiff = (UInt32(writer.count) - UInt32(footerOffset)) - withUnsafeBytes(of: footerDiff.littleEndian) { writer.append(Data($0)) } - case .failure(let error): - return .failure(error) - } - case .failure(let error): - return .failure(error) - } - - return .success(true) - } - - public func writeStreaming(_ info: ArrowWriter.Info) -> Result { - let writer: any DataWriter = InMemDataWriter() - switch toMessage(info.schema) { - case .success(let schemaData): - withUnsafeBytes(of: CONTINUATIONMARKER.littleEndian) {writer.append(Data($0))} - withUnsafeBytes(of: UInt32(schemaData.count).littleEndian) {writer.append(Data($0))} - writer.append(schemaData) - case .failure(let error): - return .failure(error) - } - - for batch in info.batches { - switch toMessage(batch) { - case .success(let batchData): - withUnsafeBytes(of: CONTINUATIONMARKER.littleEndian) {writer.append(Data($0))} - withUnsafeBytes(of: UInt32(batchData[0].count).littleEndian) {writer.append(Data($0))} - writer.append(batchData[0]) - writer.append(batchData[1]) - case .failure(let error): - return .failure(error) - } - } - - withUnsafeBytes(of: CONTINUATIONMARKER.littleEndian) {writer.append(Data($0))} - withUnsafeBytes(of: UInt32(0).littleEndian) {writer.append(Data($0))} - if let memWriter = writer as? InMemDataWriter { - return .success(memWriter.data) - } else { - return .failure(.invalid("Unable to cast writer")) - } - } - - public func writeFile(_ info: ArrowWriter.Info) -> Result { - var writer: any DataWriter = InMemDataWriter() - switch writeFile(&writer, info: info) { - case .success: - if let memWriter = writer as? 
InMemDataWriter { - return .success(memWriter.data) - } else { - return .failure(.invalid("Unable to cast writer")) - } - case .failure(let error): - return .failure(error) - } - } - - public func toFile(_ fileName: URL, info: ArrowWriter.Info) -> Result { - do { - try Data().write(to: fileName) - } catch { - return .failure(.ioError("\(error)")) - } - - let fileHandle = FileHandle(forUpdatingAtPath: fileName.path)! - defer { fileHandle.closeFile() } - - var markerData = FILEMARKER.data(using: .utf8)! - addPadForAlignment(&markerData) - - var writer: any DataWriter = FileDataWriter(fileHandle) - writer.append(FILEMARKER.data(using: .utf8)!) - switch writeFile(&writer, info: info) { - case .success: - writer.append(FILEMARKER.data(using: .utf8)!) - case .failure(let error): - return .failure(error) - } - - return .success(true) - } - - public func toMessage(_ batch: RecordBatch) -> Result<[Data], ArrowError> { - var writer: any DataWriter = InMemDataWriter() - switch writeRecordBatch(batch: batch) { - case .success(let message): - writer.append(message.0) - addPadForAlignment(&writer) - var dataWriter: any DataWriter = InMemDataWriter() - switch writeRecordBatchData(&dataWriter, fields: batch.schema.fields, columns: batch.columns) { - case .success: - return .success([ - (writer as! InMemDataWriter).data, // swiftlint:disable:this force_cast - (dataWriter as! InMemDataWriter).data // swiftlint:disable:this force_cast - ]) - case .failure(let error): - return .failure(error) - } - case .failure(let error): - return .failure(error) - } - } - - public func toMessage(_ schema: ArrowSchema) -> Result { - var schemaSize: Int32 = 0 - var fbb = FlatBufferBuilder() - switch writeSchema(&fbb, schema: schema) { - case .success(let schemaOffset): - schemaSize = Int32(schemaOffset.o) - case .failure(let error): - return .failure(error) - } - - let startMessage = org_apache_arrow_flatbuf_Message.startMessage(&fbb) - org_apache_arrow_flatbuf_Message.add(bodyLength: Int64(0), &fbb) - org_apache_arrow_flatbuf_Message.add(headerType: .schema, &fbb) - org_apache_arrow_flatbuf_Message.add(header: Offset(offset: UOffset(schemaSize)), &fbb) - org_apache_arrow_flatbuf_Message.add(version: .max, &fbb) - let messageOffset = org_apache_arrow_flatbuf_Message.endMessage(&fbb, start: startMessage) - fbb.finish(offset: messageOffset) - return .success(fbb.data) - } -} -// swiftlint:disable:this file_length diff --git a/swift/Arrow/Sources/Arrow/ArrowWriterHelper.swift b/swift/Arrow/Sources/Arrow/ArrowWriterHelper.swift deleted file mode 100644 index 4d63192585f..00000000000 --- a/swift/Arrow/Sources/Arrow/ArrowWriterHelper.swift +++ /dev/null @@ -1,135 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
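A worked example of the 8-byte alignment rule implemented by the padding helpers deleted later in this hunk (addPadForAlignment / getPadForAlignment): a 13-byte buffer rounds up to 16.

```swift
// 13 % 8 == 5, so 8 - 5 == 3 zero bytes of padding are appended.
var buffer = Data(repeating: 0xAB, count: 13)
addPadForAlignment(&buffer)            // mutates: buffer.count becomes 16
assert(buffer.count == 16)
assert(getPadForAlignment(13) == 16)   // pure computation, no mutation
assert(getPadForAlignment(16) == 16)   // already-aligned counts pass through
```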
- -import Foundation -import FlatBuffers - -extension Data { - func hexEncodedString() -> String { - return map { String(format: "%02hhx", $0) }.joined() - } -} - -func toFBTypeEnum(_ arrowType: ArrowType) -> Result { - let typeId = arrowType.id - switch typeId { - case .int8, .int16, .int32, .int64, .uint8, .uint16, .uint32, .uint64: - return .success(org_apache_arrow_flatbuf_Type_.int) - case .float, .double: - return .success(org_apache_arrow_flatbuf_Type_.floatingpoint) - case .string: - return .success(org_apache_arrow_flatbuf_Type_.utf8) - case .binary: - return .success(org_apache_arrow_flatbuf_Type_.binary) - case .boolean: - return .success(org_apache_arrow_flatbuf_Type_.bool) - case .date32, .date64: - return .success(org_apache_arrow_flatbuf_Type_.date) - case .time32, .time64: - return .success(org_apache_arrow_flatbuf_Type_.time) - case .strct: - return .success(org_apache_arrow_flatbuf_Type_.struct_) - default: - return .failure(.unknownType("Unable to find flatbuf type for Arrow type: \(typeId)")) - } -} - -func toFBType( // swiftlint:disable:this cyclomatic_complexity function_body_length - _ fbb: inout FlatBufferBuilder, - arrowType: ArrowType -) -> Result { - let infoType = arrowType.info - switch arrowType.id { - case .int8, .uint8: - return .success(org_apache_arrow_flatbuf_Int.createInt( - &fbb, bitWidth: 8, isSigned: infoType == ArrowType.ArrowInt8)) - case .int16, .uint16: - return .success(org_apache_arrow_flatbuf_Int.createInt( - &fbb, bitWidth: 16, isSigned: infoType == ArrowType.ArrowInt16)) - case .int32, .uint32: - return .success(org_apache_arrow_flatbuf_Int.createInt( - &fbb, bitWidth: 32, isSigned: infoType == ArrowType.ArrowInt32)) - case .int64, .uint64: - return .success(org_apache_arrow_flatbuf_Int.createInt( - &fbb, bitWidth: 64, isSigned: infoType == ArrowType.ArrowInt64)) - case .float: - return .success(org_apache_arrow_flatbuf_FloatingPoint.createFloatingPoint(&fbb, precision: .single)) - case .double: - return .success(org_apache_arrow_flatbuf_FloatingPoint.createFloatingPoint(&fbb, precision: .double)) - case .string: - return .success(org_apache_arrow_flatbuf_Utf8.endUtf8( - &fbb, start: org_apache_arrow_flatbuf_Utf8.startUtf8(&fbb))) - case .binary: - return .success(org_apache_arrow_flatbuf_Binary.endBinary( - &fbb, start: org_apache_arrow_flatbuf_Binary.startBinary(&fbb))) - case .boolean: - return .success(org_apache_arrow_flatbuf_Bool.endBool( - &fbb, start: org_apache_arrow_flatbuf_Bool.startBool(&fbb))) - case .date32: - let startOffset = org_apache_arrow_flatbuf_Date.startDate(&fbb) - org_apache_arrow_flatbuf_Date.add(unit: .day, &fbb) - return .success(org_apache_arrow_flatbuf_Date.endDate(&fbb, start: startOffset)) - case .date64: - let startOffset = org_apache_arrow_flatbuf_Date.startDate(&fbb) - org_apache_arrow_flatbuf_Date.add(unit: .millisecond, &fbb) - return .success(org_apache_arrow_flatbuf_Date.endDate(&fbb, start: startOffset)) - case .time32: - let startOffset = org_apache_arrow_flatbuf_Time.startTime(&fbb) - if let timeType = arrowType as? ArrowTypeTime32 { - org_apache_arrow_flatbuf_Time.add(unit: timeType.unit == .seconds ? .second : .millisecond, &fbb) - return .success(org_apache_arrow_flatbuf_Time.endTime(&fbb, start: startOffset)) - } - - return .failure(.invalid("Unable to case to Time32")) - case .time64: - let startOffset = org_apache_arrow_flatbuf_Time.startTime(&fbb) - if let timeType = arrowType as? ArrowTypeTime64 { - org_apache_arrow_flatbuf_Time.add(unit: timeType.unit == .microseconds ? 
.microsecond : .nanosecond, &fbb) - return .success(org_apache_arrow_flatbuf_Time.endTime(&fbb, start: startOffset)) - } - - return .failure(.invalid("Unable to case to Time64")) - case .strct: - let startOffset = org_apache_arrow_flatbuf_Struct_.startStruct_(&fbb) - return .success(org_apache_arrow_flatbuf_Struct_.endStruct_(&fbb, start: startOffset)) - default: - return .failure(.unknownType("Unable to add flatbuf type for Arrow type: \(infoType)")) - } -} - -func addPadForAlignment(_ data: inout Data, alignment: Int = 8) { - let padding = data.count % Int(alignment) - if padding > 0 { - data.append(Data([UInt8](repeating: 0, count: alignment - padding))) - } -} - -func addPadForAlignment(_ writer: inout DataWriter, alignment: Int = 8) { - let padding = writer.count % Int(alignment) - if padding > 0 { - writer.append(Data([UInt8](repeating: 0, count: alignment - padding))) - } -} - -func getPadForAlignment(_ count: Int, alignment: Int = 8) -> Int { - let padding = count % Int(alignment) - if padding > 0 { - return count + (alignment - padding) - } - - return count -} diff --git a/swift/Arrow/Sources/Arrow/BitUtility.swift b/swift/Arrow/Sources/Arrow/BitUtility.swift deleted file mode 100644 index 84edf9889b6..00000000000 --- a/swift/Arrow/Sources/Arrow/BitUtility.swift +++ /dev/null @@ -1,40 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -class BitUtility { - static func isSet(_ bit: UInt, buffer: ArrowBuffer) -> Bool { - let byteIndex = UInt(bit / 8) - let theByte = buffer.rawPointer.load(fromByteOffset: Int(byteIndex), as: UInt8.self) - return theByte & UInt8(1 << (bit % 8)) > 0 - } - - static func setBit(_ bit: UInt, buffer: ArrowBuffer) { - let byteIndex = UInt(bit / 8) - var theByte = buffer.rawPointer.load(fromByteOffset: Int(byteIndex), as: UInt8.self) - theByte |= UInt8(1 << (bit % 8)) - buffer.rawPointer.storeBytes(of: theByte, toByteOffset: Int(byteIndex), as: UInt8.self) - } - - static func clearBit(_ bit: UInt, buffer: ArrowBuffer) { - let byteIndex = UInt(bit / 8) - var theByte = buffer.rawPointer.load(fromByteOffset: Int(byteIndex), as: UInt8.self) - theByte &= ~(UInt8(1 << (bit % 8))) - buffer.rawPointer.storeBytes(of: theByte, toByteOffset: Int(byteIndex), as: UInt8.self) - } -} diff --git a/swift/Arrow/Sources/Arrow/ChunkedArray.swift b/swift/Arrow/Sources/Arrow/ChunkedArray.swift deleted file mode 100644 index fb5734f64b6..00000000000 --- a/swift/Arrow/Sources/Arrow/ChunkedArray.swift +++ /dev/null @@ -1,149 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. 
The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import Foundation
-
-public protocol AnyArray {
-  var arrowData: ArrowData {get}
-  func asAny(_ index: UInt) -> Any?
-  var length: UInt {get}
-}
-
-public protocol AsString {
-  func asString(_ index: UInt) -> String
-}
-
-public class ChunkedArrayHolder {
-  public let type: ArrowType
-  public let length: UInt
-  public let nullCount: UInt
-  public let holder: Any
-
-  public let getBufferData: () -> Result<[Data], ArrowError>
-  public let getBufferDataSizes: () -> Result<[Int], ArrowError>
-  public init<T>(_ chunked: ChunkedArray<T>) { // swiftlint:disable:this cyclomatic_complexity
-    self.holder = chunked
-    self.length = chunked.length
-    self.type = chunked.type
-    self.nullCount = chunked.nullCount
-    self.getBufferData = {() -> Result<[Data], ArrowError> in
-      var bufferData = [Data]()
-      var numBuffers = 2
-      switch toFBTypeEnum(chunked.type) {
-      case .success(let fbType):
-        if !isFixedPrimitive(fbType) {
-          numBuffers = 3
-        }
-      case .failure(let error):
-        return .failure(error)
-      }
-
-      for _ in 0 ..< numBuffers {
-        bufferData.append(Data())
-      }
-
-      for arrowData in chunked.arrays {
-        for index in 0 ..< numBuffers {
-          arrowData.arrowData.buffers[index].append(to: &bufferData[index])
-        }
-      }
-
-      return .success(bufferData)
-    }
-
-    self.getBufferDataSizes = {() -> Result<[Int], ArrowError> in
-      var bufferDataSizes = [Int]()
-      var numBuffers = 2
-
-      switch toFBTypeEnum(chunked.type) {
-      case .success(let fbType):
-        if !isFixedPrimitive(fbType) {
-          numBuffers = 3
-        }
-      case .failure(let error):
-        return .failure(error)
-      }
-
-      for _ in 0 ..< numBuffers {
-        bufferDataSizes.append(Int(0))
-      }
-
-      for arrowData in chunked.arrays {
-        for index in 0 ..< numBuffers {
-          bufferDataSizes[index] += Int(arrowData.arrowData.buffers[index].capacity)
-        }
-      }
-
-      return .success(bufferDataSizes)
-    }
-  }
-}
-
-public class ChunkedArray<T>: AsString {
-  public let arrays: [ArrowArray<T>]
-  public let type: ArrowType
-  public let nullCount: UInt
-  public let length: UInt
-  public var arrayCount: UInt {return UInt(self.arrays.count)}
-
-  public init(_ arrays: [ArrowArray<T>]) throws {
-    if arrays.count == 0 {
-      throw ArrowError.arrayHasNoElements
-    }
-
-    self.type = arrays[0].arrowData.type
-    var len: UInt = 0
-    var nullCount: UInt = 0
-    for array in arrays {
-      len += array.length
-      nullCount += array.nullCount
-    }
-
-    self.arrays = arrays
-    self.length = len
-    self.nullCount = nullCount
-  }
-
-  public subscript(_ index: UInt) -> T?
{ - if arrays.count == 0 { - return nil - } - - var localIndex = index - var arrayIndex = 0 - var len: UInt = arrays[arrayIndex].length - while localIndex > (len - 1) { - arrayIndex += 1 - if arrayIndex > arrays.count { - return nil - } - - localIndex -= len - len = arrays[arrayIndex].length - } - - return arrays[arrayIndex][localIndex] - } - - public func asString(_ index: UInt) -> String { - if self[index] == nil { - return "" - } - - return "\(self[index]!)" - } -} diff --git a/swift/Arrow/Sources/Arrow/File_generated.swift b/swift/Arrow/Sources/Arrow/File_generated.swift deleted file mode 100644 index 53888e48d60..00000000000 --- a/swift/Arrow/Sources/Arrow/File_generated.swift +++ /dev/null @@ -1,160 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// automatically generated by the FlatBuffers compiler, do not modify -// swiftlint:disable all -// swiftformat:disable all - -import FlatBuffers - -public struct org_apache_arrow_flatbuf_Block: NativeStruct, Verifiable, FlatbuffersInitializable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - - /// Index to the start of the RecordBlock (note this is past the Message header) - private var _offset: Int64 - /// Length of the metadata - private var _metaDataLength: Int32 - private let padding0__: UInt32 = 0 - /// Length of the data (this is aligned so there can be a gap between this and - /// the metadata). - private var _bodyLength: Int64 - - public init(_ bb: ByteBuffer, o: Int32) { - let _accessor = Struct(bb: bb, position: o) - _offset = _accessor.readBuffer(of: Int64.self, at: 0) - _metaDataLength = _accessor.readBuffer(of: Int32.self, at: 8) - _bodyLength = _accessor.readBuffer(of: Int64.self, at: 16) - } - - public init(offset: Int64, metaDataLength: Int32, bodyLength: Int64) { - _offset = offset - _metaDataLength = metaDataLength - _bodyLength = bodyLength - } - - public init() { - _offset = 0 - _metaDataLength = 0 - _bodyLength = 0 - } - - /// Index to the start of the RecordBlock (note this is past the Message header) - public var offset: Int64 { _offset } - /// Length of the metadata - public var metaDataLength: Int32 { _metaDataLength } - /// Length of the data (this is aligned so there can be a gap between this and - /// the metadata). - public var bodyLength: Int64 { _bodyLength } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - try verifier.inBuffer(position: position, of: org_apache_arrow_flatbuf_Block.self) - } -} - -public struct org_apache_arrow_flatbuf_Block_Mutable: FlatBufferObject { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
{ return _accessor.bb } - private var _accessor: Struct - - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Struct(bb: bb, position: o) } - - public var offset: Int64 { return _accessor.readBuffer(of: Int64.self, at: 0) } - public var metaDataLength: Int32 { return _accessor.readBuffer(of: Int32.self, at: 8) } - public var bodyLength: Int64 { return _accessor.readBuffer(of: Int64.self, at: 16) } -} - -/// ---------------------------------------------------------------------- -/// Arrow File metadata -/// -public struct org_apache_arrow_flatbuf_Footer: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsFooter(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Footer { return org_apache_arrow_flatbuf_Footer(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case version = 4 - case schema = 6 - case dictionaries = 8 - case recordBatches = 10 - case customMetadata = 12 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - public var version: org_apache_arrow_flatbuf_MetadataVersion { let o = _accessor.offset(VTOFFSET.version.v); return o == 0 ? .v1 : org_apache_arrow_flatbuf_MetadataVersion(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .v1 } - public var schema: org_apache_arrow_flatbuf_Schema? { let o = _accessor.offset(VTOFFSET.schema.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Schema(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) } - public var hasDictionaries: Bool { let o = _accessor.offset(VTOFFSET.dictionaries.v); return o == 0 ? false : true } - public var dictionariesCount: Int32 { let o = _accessor.offset(VTOFFSET.dictionaries.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func dictionaries(at index: Int32) -> org_apache_arrow_flatbuf_Block? { let o = _accessor.offset(VTOFFSET.dictionaries.v); return o == 0 ? nil : _accessor.directRead(of: org_apache_arrow_flatbuf_Block.self, offset: _accessor.vector(at: o) + index * 24) } - public func mutableDictionaries(at index: Int32) -> org_apache_arrow_flatbuf_Block_Mutable? { let o = _accessor.offset(VTOFFSET.dictionaries.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Block_Mutable(_accessor.bb, o: _accessor.vector(at: o) + index * 24) } - public var hasRecordBatches: Bool { let o = _accessor.offset(VTOFFSET.recordBatches.v); return o == 0 ? false : true } - public var recordBatchesCount: Int32 { let o = _accessor.offset(VTOFFSET.recordBatches.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func recordBatches(at index: Int32) -> org_apache_arrow_flatbuf_Block? { let o = _accessor.offset(VTOFFSET.recordBatches.v); return o == 0 ? nil : _accessor.directRead(of: org_apache_arrow_flatbuf_Block.self, offset: _accessor.vector(at: o) + index * 24) } - public func mutableRecordBatches(at index: Int32) -> org_apache_arrow_flatbuf_Block_Mutable? { let o = _accessor.offset(VTOFFSET.recordBatches.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Block_Mutable(_accessor.bb, o: _accessor.vector(at: o) + index * 24) } - /// User-defined metadata - public var hasCustomMetadata: Bool { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? 
false : true } - public var customMetadataCount: Int32 { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func customMetadata(at index: Int32) -> org_apache_arrow_flatbuf_KeyValue? { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? nil : org_apache_arrow_flatbuf_KeyValue(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) } - public static func startFooter(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 5) } - public static func add(version: org_apache_arrow_flatbuf_MetadataVersion, _ fbb: inout FlatBufferBuilder) { fbb.add(element: version.rawValue, def: 0, at: VTOFFSET.version.p) } - public static func add(schema: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: schema, at: VTOFFSET.schema.p) } - public static func addVectorOf(dictionaries: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: dictionaries, at: VTOFFSET.dictionaries.p) } - public static func startVectorOfDictionaries(_ size: Int, in builder: inout FlatBufferBuilder) { - builder.startVector(size * MemoryLayout.size, elementSize: MemoryLayout.alignment) - } - public static func addVectorOf(recordBatches: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: recordBatches, at: VTOFFSET.recordBatches.p) } - public static func startVectorOfRecordBatches(_ size: Int, in builder: inout FlatBufferBuilder) { - builder.startVector(size * MemoryLayout.size, elementSize: MemoryLayout.alignment) - } - public static func addVectorOf(customMetadata: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: customMetadata, at: VTOFFSET.customMetadata.p) } - public static func endFooter(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createFooter( - _ fbb: inout FlatBufferBuilder, - version: org_apache_arrow_flatbuf_MetadataVersion = .v1, - schemaOffset schema: Offset = Offset(), - dictionariesVectorOffset dictionaries: Offset = Offset(), - recordBatchesVectorOffset recordBatches: Offset = Offset(), - customMetadataVectorOffset customMetadata: Offset = Offset() - ) -> Offset { - let __start = org_apache_arrow_flatbuf_Footer.startFooter(&fbb) - org_apache_arrow_flatbuf_Footer.add(version: version, &fbb) - org_apache_arrow_flatbuf_Footer.add(schema: schema, &fbb) - org_apache_arrow_flatbuf_Footer.addVectorOf(dictionaries: dictionaries, &fbb) - org_apache_arrow_flatbuf_Footer.addVectorOf(recordBatches: recordBatches, &fbb) - org_apache_arrow_flatbuf_Footer.addVectorOf(customMetadata: customMetadata, &fbb) - return org_apache_arrow_flatbuf_Footer.endFooter(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.version.p, fieldName: "version", required: false, type: org_apache_arrow_flatbuf_MetadataVersion.self) - try _v.visit(field: VTOFFSET.schema.p, fieldName: "schema", required: false, type: ForwardOffset.self) - try _v.visit(field: VTOFFSET.dictionaries.p, fieldName: "dictionaries", required: false, type: ForwardOffset>.self) - try _v.visit(field: VTOFFSET.recordBatches.p, fieldName: "recordBatches", required: false, type: ForwardOffset>.self) - try _v.visit(field: VTOFFSET.customMetadata.p, fieldName: "customMetadata", required: false, type: ForwardOffset, org_apache_arrow_flatbuf_KeyValue>>.self) - _v.finish() - } -} - diff --git 
diff --git a/swift/Arrow/Sources/Arrow/MemoryAllocator.swift b/swift/Arrow/Sources/Arrow/MemoryAllocator.swift
deleted file mode 100644
index 0f6e54ec042..00000000000
--- a/swift/Arrow/Sources/Arrow/MemoryAllocator.swift
+++ /dev/null
@@ -1,31 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import Foundation
-
-public class MemoryAllocator {
-  let alignment: Int
-  init(_ alignment: Int) {
-    self.alignment = alignment
-  }
-
-  func allocateArray(_ byteCount: Int) -> UnsafeMutableRawPointer {
-    return UnsafeMutableRawPointer.allocate(
-      byteCount: byteCount,
-      alignment: self.alignment)
-  }
-}
diff --git a/swift/Arrow/Sources/Arrow/Message_generated.swift b/swift/Arrow/Sources/Arrow/Message_generated.swift
deleted file mode 100644
index 6820aa11c42..00000000000
--- a/swift/Arrow/Sources/Arrow/Message_generated.swift
+++ /dev/null
@@ -1,421 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
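Before the generated Message definitions below, it may help to recall the envelope they travel in. A minimal sketch of the encapsulated IPC message framing from the Arrow format spec (editor's illustration, not part of this file; assumes a little-endian host):

import Foundation

// An encapsulated IPC message is: a 0xFFFFFFFF continuation marker, a
// little-endian Int32 metadata length, the flatbuffer metadata itself,
// then the (padded) body bytes.
func splitEncapsulatedMessage(_ data: Data) -> (metadata: Data, rest: Data)? {
    guard data.count >= 8 else { return nil }
    let continuation = data.withUnsafeBytes { $0.loadUnaligned(as: UInt32.self) }
    guard continuation == 0xFFFFFFFF else { return nil }
    let metadataLength = data.withUnsafeBytes { $0.loadUnaligned(fromByteOffset: 4, as: Int32.self) }
    let metadataEnd = 8 + Int(metadataLength)
    guard data.count >= metadataEnd else { return nil }
    return (data.subdata(in: 8..<metadataEnd), data.subdata(in: metadataEnd..<data.count))
}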
- -// automatically generated by the FlatBuffers compiler, do not modify -// swiftlint:disable all -// swiftformat:disable all - -import FlatBuffers - -public enum org_apache_arrow_flatbuf_CompressionType: Int8, Enum, Verifiable { - public typealias T = Int8 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int8 { return self.rawValue } - case lz4Frame = 0 - case zstd = 1 - - public static var max: org_apache_arrow_flatbuf_CompressionType { return .zstd } - public static var min: org_apache_arrow_flatbuf_CompressionType { return .lz4Frame } -} - - -/// Provided for forward compatibility in case we need to support different -/// strategies for compressing the IPC message body (like whole-body -/// compression rather than buffer-level) in the future -public enum org_apache_arrow_flatbuf_BodyCompressionMethod: Int8, Enum, Verifiable { - public typealias T = Int8 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int8 { return self.rawValue } - /// Each constituent buffer is first compressed with the indicated - /// compressor, and then written with the uncompressed length in the first 8 - /// bytes as a 64-bit little-endian signed integer followed by the compressed - /// buffer bytes (and then padding as required by the protocol). The - /// uncompressed length may be set to -1 to indicate that the data that - /// follows is not compressed, which can be useful for cases where - /// compression does not yield appreciable savings. - case buffer = 0 - - public static var max: org_apache_arrow_flatbuf_BodyCompressionMethod { return .buffer } - public static var min: org_apache_arrow_flatbuf_BodyCompressionMethod { return .buffer } -} - - -/// ---------------------------------------------------------------------- -/// The root Message type -/// This union enables us to easily send different message types without -/// redundant storage, and in the future we can easily add new message types. -/// -/// Arrow implementations do not need to implement all of the message types, -/// which may include experimental metadata types. For maximum compatibility, -/// it is best to send data using RecordBatch -public enum org_apache_arrow_flatbuf_MessageHeader: UInt8, UnionEnum { - public typealias T = UInt8 - - public init?(value: T) { - self.init(rawValue: value) - } - - public static var byteSize: Int { return MemoryLayout.size } - public var value: UInt8 { return self.rawValue } - case none_ = 0 - case schema = 1 - case dictionarybatch = 2 - case recordbatch = 3 - case tensor = 4 - case sparsetensor = 5 - - public static var max: org_apache_arrow_flatbuf_MessageHeader { return .sparsetensor } - public static var min: org_apache_arrow_flatbuf_MessageHeader { return .none_ } -} - - -/// ---------------------------------------------------------------------- -/// Data structures for describing a table row batch (a collection of -/// equal-length Arrow arrays) -/// Metadata about a field at some level of a nested type tree (but not -/// its children). 
-/// -/// For example, a List with values `[[1, 2, 3], null, [4], [5, 6], null]` -/// would have {length: 5, null_count: 2} for its List node, and {length: 6, -/// null_count: 0} for its Int16 node, as separate FieldNode structs -public struct org_apache_arrow_flatbuf_FieldNode: NativeStruct, Verifiable, FlatbuffersInitializable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - - /// The number of value slots in the Arrow array at this level of a nested - /// tree - private var _length: Int64 - /// The number of observed nulls. Fields with null_count == 0 may choose not - /// to write their physical validity bitmap out as a materialized buffer, - /// instead setting the length of the bitmap buffer to 0. - private var _nullCount: Int64 - - public init(_ bb: ByteBuffer, o: Int32) { - let _accessor = Struct(bb: bb, position: o) - _length = _accessor.readBuffer(of: Int64.self, at: 0) - _nullCount = _accessor.readBuffer(of: Int64.self, at: 8) - } - - public init(length: Int64, nullCount: Int64) { - _length = length - _nullCount = nullCount - } - - public init() { - _length = 0 - _nullCount = 0 - } - - /// The number of value slots in the Arrow array at this level of a nested - /// tree - public var length: Int64 { _length } - /// The number of observed nulls. Fields with null_count == 0 may choose not - /// to write their physical validity bitmap out as a materialized buffer, - /// instead setting the length of the bitmap buffer to 0. - public var nullCount: Int64 { _nullCount } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - try verifier.inBuffer(position: position, of: org_apache_arrow_flatbuf_FieldNode.self) - } -} - -/// ---------------------------------------------------------------------- -/// Data structures for describing a table row batch (a collection of -/// equal-length Arrow arrays) -/// Metadata about a field at some level of a nested type tree (but not -/// its children). -/// -/// For example, a List with values `[[1, 2, 3], null, [4], [5, 6], null]` -/// would have {length: 5, null_count: 2} for its List node, and {length: 6, -/// null_count: 0} for its Int16 node, as separate FieldNode structs -public struct org_apache_arrow_flatbuf_FieldNode_Mutable: FlatBufferObject { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Struct - - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Struct(bb: bb, position: o) } - - public var length: Int64 { return _accessor.readBuffer(of: Int64.self, at: 0) } - public var nullCount: Int64 { return _accessor.readBuffer(of: Int64.self, at: 8) } -} - -/// Optional compression for the memory buffers constituting IPC message -/// bodies. Intended for use with RecordBatch but could be used for other -/// message types -public struct org_apache_arrow_flatbuf_BodyCompression: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsBodyCompression(bb: ByteBuffer) -> org_apache_arrow_flatbuf_BodyCompression { return org_apache_arrow_flatbuf_BodyCompression(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case codec = 4 - case method = 6 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - /// Compressor library. - /// For LZ4_FRAME, each compressed buffer must consist of a single frame. - public var codec: org_apache_arrow_flatbuf_CompressionType { let o = _accessor.offset(VTOFFSET.codec.v); return o == 0 ? .lz4Frame : org_apache_arrow_flatbuf_CompressionType(rawValue: _accessor.readBuffer(of: Int8.self, at: o)) ?? .lz4Frame } - /// Indicates the way the record batch body was compressed - public var method: org_apache_arrow_flatbuf_BodyCompressionMethod { let o = _accessor.offset(VTOFFSET.method.v); return o == 0 ? .buffer : org_apache_arrow_flatbuf_BodyCompressionMethod(rawValue: _accessor.readBuffer(of: Int8.self, at: o)) ?? .buffer } - public static func startBodyCompression(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 2) } - public static func add(codec: org_apache_arrow_flatbuf_CompressionType, _ fbb: inout FlatBufferBuilder) { fbb.add(element: codec.rawValue, def: 0, at: VTOFFSET.codec.p) } - public static func add(method: org_apache_arrow_flatbuf_BodyCompressionMethod, _ fbb: inout FlatBufferBuilder) { fbb.add(element: method.rawValue, def: 0, at: VTOFFSET.method.p) } - public static func endBodyCompression(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createBodyCompression( - _ fbb: inout FlatBufferBuilder, - codec: org_apache_arrow_flatbuf_CompressionType = .lz4Frame, - method: org_apache_arrow_flatbuf_BodyCompressionMethod = .buffer - ) -> Offset { - let __start = org_apache_arrow_flatbuf_BodyCompression.startBodyCompression(&fbb) - org_apache_arrow_flatbuf_BodyCompression.add(codec: codec, &fbb) - org_apache_arrow_flatbuf_BodyCompression.add(method: method, &fbb) - return org_apache_arrow_flatbuf_BodyCompression.endBodyCompression(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.codec.p, fieldName: "codec", required: false, type: org_apache_arrow_flatbuf_CompressionType.self) - try _v.visit(field: VTOFFSET.method.p, fieldName: "method", required: false, type: org_apache_arrow_flatbuf_BodyCompressionMethod.self) - _v.finish() - } -} - -/// A data header describing the shared memory layout of a "record" or "row" -/// batch. Some systems call this a "row batch" internally and others a "record -/// batch". -public struct org_apache_arrow_flatbuf_RecordBatch: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
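// Editor's sketch, not generated code: decoding one body buffer under
// BodyCompressionMethod.buffer, as the comments above describe. The first
// 8 bytes hold the uncompressed length as a little-endian Int64; -1 means
// the remaining bytes are stored uncompressed. The `decompress` closure
// (an LZ4-frame or zstd decoder chosen from `codec`) and Foundation's
// Data are assumed.
func decodeBodyBuffer(_ raw: Data, decompress: (Data) -> Data) -> Data {
    let uncompressedLength = raw.withUnsafeBytes { $0.loadUnaligned(as: Int64.self) }
    let payload = raw.subdata(in: 8..<raw.count)
    // A length of -1 is the escape hatch for buffers where compression
    // would not yield appreciable savings.
    return uncompressedLength == -1 ? payload : decompress(payload)
}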
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsRecordBatch(bb: ByteBuffer) -> org_apache_arrow_flatbuf_RecordBatch { return org_apache_arrow_flatbuf_RecordBatch(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case length = 4 - case nodes = 6 - case buffers = 8 - case compression = 10 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - /// number of records / rows. The arrays in the batch should all have this - /// length - public var length: Int64 { let o = _accessor.offset(VTOFFSET.length.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int64.self, at: o) } - /// Nodes correspond to the pre-ordered flattened logical schema - public var hasNodes: Bool { let o = _accessor.offset(VTOFFSET.nodes.v); return o == 0 ? false : true } - public var nodesCount: Int32 { let o = _accessor.offset(VTOFFSET.nodes.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func nodes(at index: Int32) -> org_apache_arrow_flatbuf_FieldNode? { let o = _accessor.offset(VTOFFSET.nodes.v); return o == 0 ? nil : _accessor.directRead(of: org_apache_arrow_flatbuf_FieldNode.self, offset: _accessor.vector(at: o) + index * 16) } - public func mutableNodes(at index: Int32) -> org_apache_arrow_flatbuf_FieldNode_Mutable? { let o = _accessor.offset(VTOFFSET.nodes.v); return o == 0 ? nil : org_apache_arrow_flatbuf_FieldNode_Mutable(_accessor.bb, o: _accessor.vector(at: o) + index * 16) } - /// Buffers correspond to the pre-ordered flattened buffer tree - /// - /// The number of buffers appended to this list depends on the schema. For - /// example, most primitive arrays will have 2 buffers, 1 for the validity - /// bitmap and 1 for the values. For struct arrays, there will only be a - /// single buffer for the validity (nulls) bitmap - public var hasBuffers: Bool { let o = _accessor.offset(VTOFFSET.buffers.v); return o == 0 ? false : true } - public var buffersCount: Int32 { let o = _accessor.offset(VTOFFSET.buffers.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func buffers(at index: Int32) -> org_apache_arrow_flatbuf_Buffer? { let o = _accessor.offset(VTOFFSET.buffers.v); return o == 0 ? nil : _accessor.directRead(of: org_apache_arrow_flatbuf_Buffer.self, offset: _accessor.vector(at: o) + index * 16) } - public func mutableBuffers(at index: Int32) -> org_apache_arrow_flatbuf_Buffer_Mutable? { let o = _accessor.offset(VTOFFSET.buffers.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: _accessor.vector(at: o) + index * 16) } - /// Optional compression of the message body - public var compression: org_apache_arrow_flatbuf_BodyCompression? { let o = _accessor.offset(VTOFFSET.compression.v); return o == 0 ? 
nil : org_apache_arrow_flatbuf_BodyCompression(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) } - public static func startRecordBatch(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 4) } - public static func add(length: Int64, _ fbb: inout FlatBufferBuilder) { fbb.add(element: length, def: 0, at: VTOFFSET.length.p) } - public static func addVectorOf(nodes: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: nodes, at: VTOFFSET.nodes.p) } - public static func startVectorOfNodes(_ size: Int, in builder: inout FlatBufferBuilder) { - builder.startVector(size * MemoryLayout.size, elementSize: MemoryLayout.alignment) - } - public static func addVectorOf(buffers: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: buffers, at: VTOFFSET.buffers.p) } - public static func startVectorOfBuffers(_ size: Int, in builder: inout FlatBufferBuilder) { - builder.startVector(size * MemoryLayout.size, elementSize: MemoryLayout.alignment) - } - public static func add(compression: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: compression, at: VTOFFSET.compression.p) } - public static func endRecordBatch(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createRecordBatch( - _ fbb: inout FlatBufferBuilder, - length: Int64 = 0, - nodesVectorOffset nodes: Offset = Offset(), - buffersVectorOffset buffers: Offset = Offset(), - compressionOffset compression: Offset = Offset() - ) -> Offset { - let __start = org_apache_arrow_flatbuf_RecordBatch.startRecordBatch(&fbb) - org_apache_arrow_flatbuf_RecordBatch.add(length: length, &fbb) - org_apache_arrow_flatbuf_RecordBatch.addVectorOf(nodes: nodes, &fbb) - org_apache_arrow_flatbuf_RecordBatch.addVectorOf(buffers: buffers, &fbb) - org_apache_arrow_flatbuf_RecordBatch.add(compression: compression, &fbb) - return org_apache_arrow_flatbuf_RecordBatch.endRecordBatch(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.length.p, fieldName: "length", required: false, type: Int64.self) - try _v.visit(field: VTOFFSET.nodes.p, fieldName: "nodes", required: false, type: ForwardOffset>.self) - try _v.visit(field: VTOFFSET.buffers.p, fieldName: "buffers", required: false, type: ForwardOffset>.self) - try _v.visit(field: VTOFFSET.compression.p, fieldName: "compression", required: false, type: ForwardOffset.self) - _v.finish() - } -} - -/// For sending dictionary encoding information. Any Field can be -/// dictionary-encoded, but in this case none of its children may be -/// dictionary-encoded. -/// There is one vector / column per dictionary, but that vector / column -/// may be spread across multiple dictionary batches by using the isDelta -/// flag -public struct org_apache_arrow_flatbuf_DictionaryBatch: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsDictionaryBatch(bb: ByteBuffer) -> org_apache_arrow_flatbuf_DictionaryBatch { return org_apache_arrow_flatbuf_DictionaryBatch(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case id = 4 - case data = 6 - case isDelta = 8 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - public var id: Int64 { let o = _accessor.offset(VTOFFSET.id.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int64.self, at: o) } - public var data: org_apache_arrow_flatbuf_RecordBatch? { let o = _accessor.offset(VTOFFSET.data.v); return o == 0 ? nil : org_apache_arrow_flatbuf_RecordBatch(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) } - /// If isDelta is true the values in the dictionary are to be appended to a - /// dictionary with the indicated id. If isDelta is false this dictionary - /// should replace the existing dictionary. - public var isDelta: Bool { let o = _accessor.offset(VTOFFSET.isDelta.v); return o == 0 ? false : 0 != _accessor.readBuffer(of: Byte.self, at: o) } - public static func startDictionaryBatch(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 3) } - public static func add(id: Int64, _ fbb: inout FlatBufferBuilder) { fbb.add(element: id, def: 0, at: VTOFFSET.id.p) } - public static func add(data: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: data, at: VTOFFSET.data.p) } - public static func add(isDelta: Bool, _ fbb: inout FlatBufferBuilder) { fbb.add(element: isDelta, def: false, - at: VTOFFSET.isDelta.p) } - public static func endDictionaryBatch(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createDictionaryBatch( - _ fbb: inout FlatBufferBuilder, - id: Int64 = 0, - dataOffset data: Offset = Offset(), - isDelta: Bool = false - ) -> Offset { - let __start = org_apache_arrow_flatbuf_DictionaryBatch.startDictionaryBatch(&fbb) - org_apache_arrow_flatbuf_DictionaryBatch.add(id: id, &fbb) - org_apache_arrow_flatbuf_DictionaryBatch.add(data: data, &fbb) - org_apache_arrow_flatbuf_DictionaryBatch.add(isDelta: isDelta, &fbb) - return org_apache_arrow_flatbuf_DictionaryBatch.endDictionaryBatch(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.id.p, fieldName: "id", required: false, type: Int64.self) - try _v.visit(field: VTOFFSET.data.p, fieldName: "data", required: false, type: ForwardOffset.self) - try _v.visit(field: VTOFFSET.isDelta.p, fieldName: "isDelta", required: false, type: Bool.self) - _v.finish() - } -} - -public struct org_apache_arrow_flatbuf_Message: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
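// Editor's sketch, not generated code: the isDelta semantics above, shown
// with a hypothetical dictionary cache keyed by dictionary id. Values are
// plain strings for brevity.
final class DictionaryCache {
    private var dictionaries: [Int64: [String]] = [:]

    func apply(id: Int64, values: [String], isDelta: Bool) {
        if isDelta {
            // Delta batches append to the existing dictionary with this id.
            dictionaries[id, default: []].append(contentsOf: values)
        } else {
            // Non-delta batches replace the existing dictionary wholesale.
            dictionaries[id] = values
        }
    }

    func lookup(id: Int64, index: Int) -> String? {
        guard let dict = dictionaries[id], dict.indices.contains(index) else { return nil }
        return dict[index]
    }
}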
{ return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsMessage(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Message { return org_apache_arrow_flatbuf_Message(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case version = 4
-    case headerType = 6
-    case header = 8
-    case bodyLength = 10
-    case customMetadata = 12
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var version: org_apache_arrow_flatbuf_MetadataVersion { let o = _accessor.offset(VTOFFSET.version.v); return o == 0 ? .v1 : org_apache_arrow_flatbuf_MetadataVersion(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .v1 }
-  public var headerType: org_apache_arrow_flatbuf_MessageHeader { let o = _accessor.offset(VTOFFSET.headerType.v); return o == 0 ? .none_ : org_apache_arrow_flatbuf_MessageHeader(rawValue: _accessor.readBuffer(of: UInt8.self, at: o)) ?? .none_ }
-  public func header<T: FlatbuffersInitializable>(type: T.Type) -> T? { let o = _accessor.offset(VTOFFSET.header.v); return o == 0 ? nil : _accessor.union(o) }
-  public var bodyLength: Int64 { let o = _accessor.offset(VTOFFSET.bodyLength.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int64.self, at: o) }
-  public var hasCustomMetadata: Bool { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? false : true }
-  public var customMetadataCount: Int32 { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? 0 : _accessor.vector(count: o) }
-  public func customMetadata(at index: Int32) -> org_apache_arrow_flatbuf_KeyValue? { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? nil : org_apache_arrow_flatbuf_KeyValue(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) }
-  public static func startMessage(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 5) }
-  public static func add(version: org_apache_arrow_flatbuf_MetadataVersion, _ fbb: inout FlatBufferBuilder) { fbb.add(element: version.rawValue, def: 0, at: VTOFFSET.version.p) }
-  public static func add(headerType: org_apache_arrow_flatbuf_MessageHeader, _ fbb: inout FlatBufferBuilder) { fbb.add(element: headerType.rawValue, def: 0, at: VTOFFSET.headerType.p) }
-  public static func add(header: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: header, at: VTOFFSET.header.p) }
-  public static func add(bodyLength: Int64, _ fbb: inout FlatBufferBuilder) { fbb.add(element: bodyLength, def: 0, at: VTOFFSET.bodyLength.p) }
-  public static func addVectorOf(customMetadata: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: customMetadata, at: VTOFFSET.customMetadata.p) }
-  public static func endMessage(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createMessage(
-    _ fbb: inout FlatBufferBuilder,
-    version: org_apache_arrow_flatbuf_MetadataVersion = .v1,
-    headerType: org_apache_arrow_flatbuf_MessageHeader = .none_,
-    headerOffset header: Offset = Offset(),
-    bodyLength: Int64 = 0,
-    customMetadataVectorOffset customMetadata: Offset = Offset()
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Message.startMessage(&fbb)
-    org_apache_arrow_flatbuf_Message.add(version: version, &fbb)
-    org_apache_arrow_flatbuf_Message.add(headerType: headerType, &fbb)
-    org_apache_arrow_flatbuf_Message.add(header: header, &fbb)
-    org_apache_arrow_flatbuf_Message.add(bodyLength: bodyLength, &fbb)
-    org_apache_arrow_flatbuf_Message.addVectorOf(customMetadata: customMetadata, &fbb)
-    return org_apache_arrow_flatbuf_Message.endMessage(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.version.p, fieldName: "version", required: false, type: org_apache_arrow_flatbuf_MetadataVersion.self)
-    try _v.visit(unionKey: VTOFFSET.headerType.p, unionField: VTOFFSET.header.p, unionKeyName: "headerType", fieldName: "header", required: false, completion: { (verifier, key: org_apache_arrow_flatbuf_MessageHeader, pos) in
-      switch key {
-      case .none_:
-        break // NOTE - SWIFT doesn't support none
-      case .schema:
-        try ForwardOffset<org_apache_arrow_flatbuf_Schema>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Schema.self)
-      case .dictionarybatch:
-        try ForwardOffset<org_apache_arrow_flatbuf_DictionaryBatch>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_DictionaryBatch.self)
-      case .recordbatch:
-        try ForwardOffset<org_apache_arrow_flatbuf_RecordBatch>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_RecordBatch.self)
-      case .tensor:
-        try ForwardOffset<org_apache_arrow_flatbuf_Tensor>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Tensor.self)
-      case .sparsetensor:
-        try ForwardOffset<org_apache_arrow_flatbuf_SparseTensor>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_SparseTensor.self)
-      }
-    })
-    try _v.visit(field: VTOFFSET.bodyLength.p, fieldName: "bodyLength", required: false, type: Int64.self)
-    try _v.visit(field: VTOFFSET.customMetadata.p, fieldName: "customMetadata", required: false, type: ForwardOffset<Vector<ForwardOffset<org_apache_arrow_flatbuf_KeyValue>, org_apache_arrow_flatbuf_KeyValue>>.self)
-    _v.finish()
-  }
-}
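The Message table above is the unit a stream reader dispatches on. A hypothetical usage sketch (editor's illustration, not part of the patch; the reads follow the generated API above, the helper itself is invented):

func describe(message: org_apache_arrow_flatbuf_Message) -> String {
    switch message.headerType {
    case .schema:
        return "schema message"
    case .recordbatch:
        let batch = message.header(type: org_apache_arrow_flatbuf_RecordBatch.self)
        return "record batch with \(batch?.length ?? 0) rows, body \(message.bodyLength) bytes"
    case .dictionarybatch:
        let dict = message.header(type: org_apache_arrow_flatbuf_DictionaryBatch.self)
        return "dictionary batch id \(dict?.id ?? -1)\(dict?.isDelta == true ? " (delta)" : "")"
    case .tensor, .sparsetensor:
        return "tensor message (not handled here)"
    case .none_:
        return "empty message"
    }
}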
diff --git a/swift/Arrow/Sources/Arrow/ProtoUtil.swift b/swift/Arrow/Sources/Arrow/ProtoUtil.swift
deleted file mode 100644
index 88cfb0bfcde..00000000000
--- a/swift/Arrow/Sources/Arrow/ProtoUtil.swift
+++ /dev/null
@@ -1,80 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import Foundation
-
-func fromProto( // swiftlint:disable:this cyclomatic_complexity function_body_length
-  field: org_apache_arrow_flatbuf_Field
-) -> ArrowField {
-  let type = field.typeType
-  var arrowType = ArrowType(ArrowType.ArrowUnknown)
-  switch type {
-  case .int:
-    let intType = field.type(type: org_apache_arrow_flatbuf_Int.self)!
-    let bitWidth = intType.bitWidth
-    if bitWidth == 8 {
-      arrowType = ArrowType(intType.isSigned ? ArrowType.ArrowInt8 : ArrowType.ArrowUInt8)
-    } else if bitWidth == 16 {
-      arrowType = ArrowType(intType.isSigned ? ArrowType.ArrowInt16 : ArrowType.ArrowUInt16)
-    } else if bitWidth == 32 {
-      arrowType = ArrowType(intType.isSigned ? ArrowType.ArrowInt32 : ArrowType.ArrowUInt32)
-    } else if bitWidth == 64 {
-      arrowType = ArrowType(intType.isSigned ? ArrowType.ArrowInt64 : ArrowType.ArrowUInt64)
-    }
-  case .bool:
-    arrowType = ArrowType(ArrowType.ArrowBool)
-  case .floatingpoint:
-    let floatType = field.type(type: org_apache_arrow_flatbuf_FloatingPoint.self)!
-    if floatType.precision == .single {
-      arrowType = ArrowType(ArrowType.ArrowFloat)
-    } else if floatType.precision == .double {
-      arrowType = ArrowType(ArrowType.ArrowDouble)
-    }
-  case .utf8:
-    arrowType = ArrowType(ArrowType.ArrowString)
-  case .binary:
-    arrowType = ArrowType(ArrowType.ArrowBinary)
-  case .date:
-    let dateType = field.type(type: org_apache_arrow_flatbuf_Date.self)!
-    if dateType.unit == .day {
-      arrowType = ArrowType(ArrowType.ArrowDate32)
-    } else {
-      arrowType = ArrowType(ArrowType.ArrowDate64)
-    }
-  case .time:
-    let timeType = field.type(type: org_apache_arrow_flatbuf_Time.self)!
-    if timeType.unit == .second || timeType.unit == .millisecond {
-      let arrowUnit: ArrowTime32Unit = timeType.unit == .second ? .seconds : .milliseconds
-      arrowType = ArrowTypeTime32(arrowUnit)
-    } else {
-      let arrowUnit: ArrowTime64Unit = timeType.unit == .microsecond ? .microseconds : .nanoseconds
-      arrowType = ArrowTypeTime64(arrowUnit)
-    }
-  case .struct_:
-    var children = [ArrowField]()
-    for index in 0...size }
-  public var value: Int16 { return self.rawValue }
-  /// 0.1.0 (October 2016).
-  case v1 = 0
-  /// 0.2.0 (February 2017). Non-backwards compatible with V1.
-  case v2 = 1
-  /// 0.3.0 -> 0.7.1 (May - December 2017). Non-backwards compatible with V2.
-  case v3 = 2
-  /// >= 0.8.0 (December 2017). Non-backwards compatible with V3.
-  case v4 = 3
-  /// >= 1.0.0 (July 2020). Backwards compatible with V4 (V5 readers can read V4
-  /// metadata and IPC messages). Implementations are recommended to provide a
-  /// V4 compatibility mode with V5 format changes disabled.
-  ///
-  /// Incompatible changes between V4 and V5:
-  /// - Union buffer layout has changed. In V5, Unions don't have a validity
-  ///   bitmap buffer.
-  case v5 = 4
-
-  public static var max: org_apache_arrow_flatbuf_MetadataVersion { return .v5 }
-  public static var min: org_apache_arrow_flatbuf_MetadataVersion { return .v1 }
-}
-
-
-/// Represents Arrow Features that might not have full support
-/// within implementations. This is intended to be used in
-/// two scenarios:
-/// 1. A mechanism for readers of Arrow Streams
-///    and files to understand that the stream or file makes
-///    use of a feature that isn't supported or unknown to
-///    the implementation (and therefore can meet the Arrow
-///    forward compatibility guarantees).
-/// 2. A means of negotiating between a client and server
-///    what features a stream is allowed to use. The enums
-///    values here are intended to represent higher level
-///    features, additional details may be negotiated
-///    with key-value pairs specific to the protocol.
-///
-/// Enums added to this list should be assigned power-of-two values
-/// to facilitate exchanging and comparing bitmaps for supported
-/// features.
-public enum org_apache_arrow_flatbuf_Feature: Int64, Enum, Verifiable {
-  public typealias T = Int64
-  public static var byteSize: Int { return MemoryLayout<Int64>.size }
-  public var value: Int64 { return self.rawValue }
-  /// Needed to make flatbuffers happy.
-  case unused = 0
-  /// The stream makes use of multiple full dictionaries with the
-  /// same ID and assumes clients implement dictionary replacement
-  /// correctly.
-  case dictionaryReplacement = 1
-  /// The stream makes use of compressed bodies as described
-  /// in Message.fbs.
- case compressedBody = 2 - - public static var max: org_apache_arrow_flatbuf_Feature { return .compressedBody } - public static var min: org_apache_arrow_flatbuf_Feature { return .unused } -} - - -public enum org_apache_arrow_flatbuf_UnionMode: Int16, Enum, Verifiable { - public typealias T = Int16 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int16 { return self.rawValue } - case sparse = 0 - case dense = 1 - - public static var max: org_apache_arrow_flatbuf_UnionMode { return .dense } - public static var min: org_apache_arrow_flatbuf_UnionMode { return .sparse } -} - - -public enum org_apache_arrow_flatbuf_Precision: Int16, Enum, Verifiable { - public typealias T = Int16 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int16 { return self.rawValue } - case half = 0 - case single = 1 - case double = 2 - - public static var max: org_apache_arrow_flatbuf_Precision { return .double } - public static var min: org_apache_arrow_flatbuf_Precision { return .half } -} - - -public enum org_apache_arrow_flatbuf_DateUnit: Int16, Enum, Verifiable { - public typealias T = Int16 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int16 { return self.rawValue } - case day = 0 - case millisecond = 1 - - public static var max: org_apache_arrow_flatbuf_DateUnit { return .millisecond } - public static var min: org_apache_arrow_flatbuf_DateUnit { return .day } -} - - -public enum org_apache_arrow_flatbuf_TimeUnit: Int16, Enum, Verifiable { - public typealias T = Int16 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int16 { return self.rawValue } - case second = 0 - case millisecond = 1 - case microsecond = 2 - case nanosecond = 3 - - public static var max: org_apache_arrow_flatbuf_TimeUnit { return .nanosecond } - public static var min: org_apache_arrow_flatbuf_TimeUnit { return .second } -} - - -public enum org_apache_arrow_flatbuf_IntervalUnit: Int16, Enum, Verifiable { - public typealias T = Int16 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int16 { return self.rawValue } - case yearMonth = 0 - case dayTime = 1 - case monthDayNano = 2 - - public static var max: org_apache_arrow_flatbuf_IntervalUnit { return .monthDayNano } - public static var min: org_apache_arrow_flatbuf_IntervalUnit { return .yearMonth } -} - - -/// ---------------------------------------------------------------------- -/// Top-level Type value, enabling extensible type-specific metadata. 
We can -/// add new logical types to Type without breaking backwards compatibility -public enum org_apache_arrow_flatbuf_Type_: UInt8, UnionEnum { - public typealias T = UInt8 - - public init?(value: T) { - self.init(rawValue: value) - } - - public static var byteSize: Int { return MemoryLayout.size } - public var value: UInt8 { return self.rawValue } - case none_ = 0 - case null = 1 - case int = 2 - case floatingpoint = 3 - case binary = 4 - case utf8 = 5 - case bool = 6 - case decimal = 7 - case date = 8 - case time = 9 - case timestamp = 10 - case interval = 11 - case list = 12 - case struct_ = 13 - case union = 14 - case fixedsizebinary = 15 - case fixedsizelist = 16 - case map = 17 - case duration = 18 - case largebinary = 19 - case largeutf8 = 20 - case largelist = 21 - case runendencoded = 22 - - public static var max: org_apache_arrow_flatbuf_Type_ { return .runendencoded } - public static var min: org_apache_arrow_flatbuf_Type_ { return .none_ } -} - - -/// ---------------------------------------------------------------------- -/// Dictionary encoding metadata -/// Maintained for forwards compatibility, in the future -/// Dictionaries might be explicit maps between integers and values -/// allowing for non-contiguous index values -public enum org_apache_arrow_flatbuf_DictionaryKind: Int16, Enum, Verifiable { - public typealias T = Int16 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int16 { return self.rawValue } - case densearray = 0 - - public static var max: org_apache_arrow_flatbuf_DictionaryKind { return .densearray } - public static var min: org_apache_arrow_flatbuf_DictionaryKind { return .densearray } -} - - -/// ---------------------------------------------------------------------- -/// Endianness of the platform producing the data -public enum org_apache_arrow_flatbuf_Endianness: Int16, Enum, Verifiable { - public typealias T = Int16 - public static var byteSize: Int { return MemoryLayout.size } - public var value: Int16 { return self.rawValue } - case little = 0 - case big = 1 - - public static var max: org_apache_arrow_flatbuf_Endianness { return .big } - public static var min: org_apache_arrow_flatbuf_Endianness { return .little } -} - - -/// ---------------------------------------------------------------------- -/// A Buffer represents a single contiguous memory segment -public struct org_apache_arrow_flatbuf_Buffer: NativeStruct, Verifiable, FlatbuffersInitializable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - - /// The relative offset into the shared memory page where the bytes for this - /// buffer starts - private var _offset: Int64 - /// The absolute length (in bytes) of the memory buffer. The memory is found - /// from offset (inclusive) to offset + length (non-inclusive). When building - /// messages using the encapsulated IPC message, padding bytes may be written - /// after a buffer, but such padding bytes do not need to be accounted for in - /// the size here. 
- private var _length: Int64 - - public init(_ bb: ByteBuffer, o: Int32) { - let _accessor = Struct(bb: bb, position: o) - _offset = _accessor.readBuffer(of: Int64.self, at: 0) - _length = _accessor.readBuffer(of: Int64.self, at: 8) - } - - public init(offset: Int64, length: Int64) { - _offset = offset - _length = length - } - - public init() { - _offset = 0 - _length = 0 - } - - /// The relative offset into the shared memory page where the bytes for this - /// buffer starts - public var offset: Int64 { _offset } - /// The absolute length (in bytes) of the memory buffer. The memory is found - /// from offset (inclusive) to offset + length (non-inclusive). When building - /// messages using the encapsulated IPC message, padding bytes may be written - /// after a buffer, but such padding bytes do not need to be accounted for in - /// the size here. - public var length: Int64 { _length } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - try verifier.inBuffer(position: position, of: org_apache_arrow_flatbuf_Buffer.self) - } -} - -/// ---------------------------------------------------------------------- -/// A Buffer represents a single contiguous memory segment -public struct org_apache_arrow_flatbuf_Buffer_Mutable: FlatBufferObject { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Struct - - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Struct(bb: bb, position: o) } - - public var offset: Int64 { return _accessor.readBuffer(of: Int64.self, at: 0) } - public var length: Int64 { return _accessor.readBuffer(of: Int64.self, at: 8) } -} - -/// These are stored in the flatbuffer in the Type union below -public struct org_apache_arrow_flatbuf_Null: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsNull(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Null { return org_apache_arrow_flatbuf_Null(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - public static func startNull(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) } - public static func endNull(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - _v.finish() - } -} - -/// A Struct_ in the flatbuffer metadata is the same as an Arrow Struct -/// (according to the physical memory layout). We used Struct_ here as -/// Struct is a reserved word in Flatbuffers -public struct org_apache_arrow_flatbuf_Struct_: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsStruct_(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Struct_ { return org_apache_arrow_flatbuf_Struct_(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - public static func startStruct_(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) } - public static func endStruct_(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - _v.finish() - } -} - -public struct org_apache_arrow_flatbuf_List: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsList(bb: ByteBuffer) -> org_apache_arrow_flatbuf_List { return org_apache_arrow_flatbuf_List(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - public static func startList(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) } - public static func endList(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - _v.finish() - } -} - -/// Same as List, but with 64-bit offsets, allowing to represent -/// extremely large data values. -public struct org_apache_arrow_flatbuf_LargeList: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsLargeList(bb: ByteBuffer) -> org_apache_arrow_flatbuf_LargeList { return org_apache_arrow_flatbuf_LargeList(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - public static func startLargeList(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) } - public static func endLargeList(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - _v.finish() - } -} - -public struct org_apache_arrow_flatbuf_FixedSizeList: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsFixedSizeList(bb: ByteBuffer) -> org_apache_arrow_flatbuf_FixedSizeList { return org_apache_arrow_flatbuf_FixedSizeList(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case listSize = 4 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - /// Number of list items per value - public var listSize: Int32 { let o = _accessor.offset(VTOFFSET.listSize.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int32.self, at: o) } - public static func startFixedSizeList(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 1) } - public static func add(listSize: Int32, _ fbb: inout FlatBufferBuilder) { fbb.add(element: listSize, def: 0, at: VTOFFSET.listSize.p) } - public static func endFixedSizeList(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createFixedSizeList( - _ fbb: inout FlatBufferBuilder, - listSize: Int32 = 0 - ) -> Offset { - let __start = org_apache_arrow_flatbuf_FixedSizeList.startFixedSizeList(&fbb) - org_apache_arrow_flatbuf_FixedSizeList.add(listSize: listSize, &fbb) - return org_apache_arrow_flatbuf_FixedSizeList.endFixedSizeList(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.listSize.p, fieldName: "listSize", required: false, type: Int32.self) - _v.finish() - } -} - -/// A Map is a logical nested type that is represented as -/// -/// List> -/// -/// In this layout, the keys and values are each respectively contiguous. We do -/// not constrain the key and value types, so the application is responsible -/// for ensuring that the keys are hashable and unique. Whether the keys are sorted -/// may be set in the metadata for this field. -/// -/// In a field with Map type, the field has a child Struct field, which then -/// has two children: key type and the second the value type. The names of the -/// child fields may be respectively "entries", "key", and "value", but this is -/// not enforced. -/// -/// Map -/// ```text -/// - child[0] entries: Struct -/// - child[0] key: K -/// - child[1] value: V -/// ``` -/// Neither the "entries" field nor the "key" field may be nullable. -/// -/// The metadata is structured so that Arrow systems without special handling -/// for Map can make Map an alias for List. The "layout" attribute for the Map -/// field must have the same contents as a List. -public struct org_apache_arrow_flatbuf_Map: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
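// Editor's sketch, not generated code: the Map layout described above,
// List<entries: Struct<key: K, value: V>>, modeled with plain Swift types.
// There is one entries struct per key/value pair, and keys and values are
// each contiguous in the child arrays.
struct MapEntry<K, V> {
    let key: K      // the "key" child must not be nullable in the Arrow layout
    let value: V?
}
// Structurally, a Map value is just a list of entries, which is why an
// implementation without special Map handling can treat Map as a List.
typealias MapValue<K, V> = [MapEntry<K, V>]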
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsMap(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Map { return org_apache_arrow_flatbuf_Map(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case keysSorted = 4 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - /// Set to true if the keys within each value are sorted - public var keysSorted: Bool { let o = _accessor.offset(VTOFFSET.keysSorted.v); return o == 0 ? false : 0 != _accessor.readBuffer(of: Byte.self, at: o) } - public static func startMap(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 1) } - public static func add(keysSorted: Bool, _ fbb: inout FlatBufferBuilder) { fbb.add(element: keysSorted, def: false, - at: VTOFFSET.keysSorted.p) } - public static func endMap(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createMap( - _ fbb: inout FlatBufferBuilder, - keysSorted: Bool = false - ) -> Offset { - let __start = org_apache_arrow_flatbuf_Map.startMap(&fbb) - org_apache_arrow_flatbuf_Map.add(keysSorted: keysSorted, &fbb) - return org_apache_arrow_flatbuf_Map.endMap(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.keysSorted.p, fieldName: "keysSorted", required: false, type: Bool.self) - _v.finish() - } -} - -/// A union is a complex type with children in Field -/// By default ids in the type vector refer to the offsets in the children -/// optionally typeIds provides an indirection between the child offset and the type id -/// for each child `typeIds[offset]` is the id used in the type vector -public struct org_apache_arrow_flatbuf_Union: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsUnion(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Union { return org_apache_arrow_flatbuf_Union(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case mode = 4 - case typeIds = 6 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - public var mode: org_apache_arrow_flatbuf_UnionMode { let o = _accessor.offset(VTOFFSET.mode.v); return o == 0 ? .sparse : org_apache_arrow_flatbuf_UnionMode(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .sparse } - public var hasTypeIds: Bool { let o = _accessor.offset(VTOFFSET.typeIds.v); return o == 0 ? false : true } - public var typeIdsCount: Int32 { let o = _accessor.offset(VTOFFSET.typeIds.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func typeIds(at index: Int32) -> Int32 { let o = _accessor.offset(VTOFFSET.typeIds.v); return o == 0 ? 
0 : _accessor.directRead(of: Int32.self, offset: _accessor.vector(at: o) + index * 4) } - public var typeIds: [Int32] { return _accessor.getVector(at: VTOFFSET.typeIds.v) ?? [] } - public static func startUnion(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 2) } - public static func add(mode: org_apache_arrow_flatbuf_UnionMode, _ fbb: inout FlatBufferBuilder) { fbb.add(element: mode.rawValue, def: 0, at: VTOFFSET.mode.p) } - public static func addVectorOf(typeIds: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: typeIds, at: VTOFFSET.typeIds.p) } - public static func endUnion(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createUnion( - _ fbb: inout FlatBufferBuilder, - mode: org_apache_arrow_flatbuf_UnionMode = .sparse, - typeIdsVectorOffset typeIds: Offset = Offset() - ) -> Offset { - let __start = org_apache_arrow_flatbuf_Union.startUnion(&fbb) - org_apache_arrow_flatbuf_Union.add(mode: mode, &fbb) - org_apache_arrow_flatbuf_Union.addVectorOf(typeIds: typeIds, &fbb) - return org_apache_arrow_flatbuf_Union.endUnion(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.mode.p, fieldName: "mode", required: false, type: org_apache_arrow_flatbuf_UnionMode.self) - try _v.visit(field: VTOFFSET.typeIds.p, fieldName: "typeIds", required: false, type: ForwardOffset>.self) - _v.finish() - } -} - -public struct org_apache_arrow_flatbuf_Int: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsInt(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Int { return org_apache_arrow_flatbuf_Int(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case bitWidth = 4 - case isSigned = 6 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - public var bitWidth: Int32 { let o = _accessor.offset(VTOFFSET.bitWidth.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int32.self, at: o) } - public var isSigned: Bool { let o = _accessor.offset(VTOFFSET.isSigned.v); return o == 0 ? 
false : 0 != _accessor.readBuffer(of: Byte.self, at: o) }
-  public static func startInt(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 2) }
-  public static func add(bitWidth: Int32, _ fbb: inout FlatBufferBuilder) { fbb.add(element: bitWidth, def: 0, at: VTOFFSET.bitWidth.p) }
-  public static func add(isSigned: Bool, _ fbb: inout FlatBufferBuilder) { fbb.add(element: isSigned, def: false,
-   at: VTOFFSET.isSigned.p) }
-  public static func endInt(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createInt(
-    _ fbb: inout FlatBufferBuilder,
-    bitWidth: Int32 = 0,
-    isSigned: Bool = false
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Int.startInt(&fbb)
-    org_apache_arrow_flatbuf_Int.add(bitWidth: bitWidth, &fbb)
-    org_apache_arrow_flatbuf_Int.add(isSigned: isSigned, &fbb)
-    return org_apache_arrow_flatbuf_Int.endInt(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.bitWidth.p, fieldName: "bitWidth", required: false, type: Int32.self)
-    try _v.visit(field: VTOFFSET.isSigned.p, fieldName: "isSigned", required: false, type: Bool.self)
-    _v.finish()
-  }
-}
-
-public struct org_apache_arrow_flatbuf_FloatingPoint: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsFloatingPoint(bb: ByteBuffer) -> org_apache_arrow_flatbuf_FloatingPoint { return org_apache_arrow_flatbuf_FloatingPoint(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case precision = 4
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var precision: org_apache_arrow_flatbuf_Precision { let o = _accessor.offset(VTOFFSET.precision.v); return o == 0 ? .half : org_apache_arrow_flatbuf_Precision(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .half }
-  public static func startFloatingPoint(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 1) }
-  public static func add(precision: org_apache_arrow_flatbuf_Precision, _ fbb: inout FlatBufferBuilder) { fbb.add(element: precision.rawValue, def: 0, at: VTOFFSET.precision.p) }
-  public static func endFloatingPoint(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createFloatingPoint(
-    _ fbb: inout FlatBufferBuilder,
-    precision: org_apache_arrow_flatbuf_Precision = .half
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_FloatingPoint.startFloatingPoint(&fbb)
-    org_apache_arrow_flatbuf_FloatingPoint.add(precision: precision, &fbb)
-    return org_apache_arrow_flatbuf_FloatingPoint.endFloatingPoint(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.precision.p, fieldName: "precision", required: false, type: org_apache_arrow_flatbuf_Precision.self)
-    _v.finish()
-  }
-}
-
-/// Unicode with UTF-8 encoding
-public struct org_apache_arrow_flatbuf_Utf8: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsUtf8(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Utf8 { return org_apache_arrow_flatbuf_Utf8(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  public static func startUtf8(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) }
-  public static func endUtf8(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    _v.finish()
-  }
-}
-
-/// Opaque binary data
-public struct org_apache_arrow_flatbuf_Binary: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsBinary(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Binary { return org_apache_arrow_flatbuf_Binary(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  public static func startBinary(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) }
-  public static func endBinary(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    _v.finish()
-  }
-}
-
-/// Same as Utf8, but with 64-bit offsets, allowing to represent
-/// extremely large data values.
-public struct org_apache_arrow_flatbuf_LargeUtf8: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsLargeUtf8(bb: ByteBuffer) -> org_apache_arrow_flatbuf_LargeUtf8 { return org_apache_arrow_flatbuf_LargeUtf8(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  public static func startLargeUtf8(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) }
-  public static func endLargeUtf8(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    _v.finish()
-  }
-}
-
-/// Same as Binary, but with 64-bit offsets, allowing to represent
-/// extremely large data values.
-public struct org_apache_arrow_flatbuf_LargeBinary: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsLargeBinary(bb: ByteBuffer) -> org_apache_arrow_flatbuf_LargeBinary { return org_apache_arrow_flatbuf_LargeBinary(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  public static func startLargeBinary(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) }
-  public static func endLargeBinary(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    _v.finish()
-  }
-}
-
-public struct org_apache_arrow_flatbuf_FixedSizeBinary: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsFixedSizeBinary(bb: ByteBuffer) -> org_apache_arrow_flatbuf_FixedSizeBinary { return org_apache_arrow_flatbuf_FixedSizeBinary(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case byteWidth = 4
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  /// Number of bytes per value
-  public var byteWidth: Int32 { let o = _accessor.offset(VTOFFSET.byteWidth.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int32.self, at: o) }
-  public static func startFixedSizeBinary(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 1) }
-  public static func add(byteWidth: Int32, _ fbb: inout FlatBufferBuilder) { fbb.add(element: byteWidth, def: 0, at: VTOFFSET.byteWidth.p) }
-  public static func endFixedSizeBinary(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createFixedSizeBinary(
-    _ fbb: inout FlatBufferBuilder,
-    byteWidth: Int32 = 0
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_FixedSizeBinary.startFixedSizeBinary(&fbb)
-    org_apache_arrow_flatbuf_FixedSizeBinary.add(byteWidth: byteWidth, &fbb)
-    return org_apache_arrow_flatbuf_FixedSizeBinary.endFixedSizeBinary(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.byteWidth.p, fieldName: "byteWidth", required: false, type: Int32.self)
-    _v.finish()
-  }
-}
-
-public struct org_apache_arrow_flatbuf_Bool: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsBool(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Bool { return org_apache_arrow_flatbuf_Bool(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  public static func startBool(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) }
-  public static func endBool(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    _v.finish()
-  }
-}
-
-/// Contains two child arrays, run_ends and values.
-/// The run_ends child array must be a 16/32/64-bit integer array
-/// which encodes the indices at which the run with the value in
-/// each corresponding index in the values child array ends.
-/// Like list/struct types, the value array can be of any type.
-public struct org_apache_arrow_flatbuf_RunEndEncoded: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsRunEndEncoded(bb: ByteBuffer) -> org_apache_arrow_flatbuf_RunEndEncoded { return org_apache_arrow_flatbuf_RunEndEncoded(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  public static func startRunEndEncoded(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 0) }
-  public static func endRunEndEncoded(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    _v.finish()
-  }
-}
-
-/// Exact decimal value represented as an integer value in two's
-/// complement. Currently only 128-bit (16-byte) and 256-bit (32-byte) integers
-/// are used. The representation uses the endianness indicated
-/// in the Schema.
-public struct org_apache_arrow_flatbuf_Decimal: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsDecimal(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Decimal { return org_apache_arrow_flatbuf_Decimal(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case precision = 4
-    case scale = 6
-    case bitWidth = 8
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  /// Total number of decimal digits
-  public var precision: Int32 { let o = _accessor.offset(VTOFFSET.precision.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int32.self, at: o) }
-  /// Number of digits after the decimal point "."
-  public var scale: Int32 { let o = _accessor.offset(VTOFFSET.scale.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int32.self, at: o) }
-  /// Number of bits per value. The only accepted widths are 128 and 256.
-  /// We use bitWidth for consistency with Int::bitWidth.
-  public var bitWidth: Int32 { let o = _accessor.offset(VTOFFSET.bitWidth.v); return o == 0 ? 128 : _accessor.readBuffer(of: Int32.self, at: o) }
-  public static func startDecimal(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 3) }
-  public static func add(precision: Int32, _ fbb: inout FlatBufferBuilder) { fbb.add(element: precision, def: 0, at: VTOFFSET.precision.p) }
-  public static func add(scale: Int32, _ fbb: inout FlatBufferBuilder) { fbb.add(element: scale, def: 0, at: VTOFFSET.scale.p) }
-  public static func add(bitWidth: Int32, _ fbb: inout FlatBufferBuilder) { fbb.add(element: bitWidth, def: 128, at: VTOFFSET.bitWidth.p) }
-  public static func endDecimal(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createDecimal(
-    _ fbb: inout FlatBufferBuilder,
-    precision: Int32 = 0,
-    scale: Int32 = 0,
-    bitWidth: Int32 = 128
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Decimal.startDecimal(&fbb)
-    org_apache_arrow_flatbuf_Decimal.add(precision: precision, &fbb)
-    org_apache_arrow_flatbuf_Decimal.add(scale: scale, &fbb)
-    org_apache_arrow_flatbuf_Decimal.add(bitWidth: bitWidth, &fbb)
-    return org_apache_arrow_flatbuf_Decimal.endDecimal(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.precision.p, fieldName: "precision", required: false, type: Int32.self)
-    try _v.visit(field: VTOFFSET.scale.p, fieldName: "scale", required: false, type: Int32.self)
-    try _v.visit(field: VTOFFSET.bitWidth.p, fieldName: "bitWidth", required: false, type: Int32.self)
-    _v.finish()
-  }
-}
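As a concrete reading of these three fields, a minimal sketch using the generated builder above: the value 123.45 carries five significant digits with two after the point, so it is stored as the two's-complement integer 12345 with precision = 5 and scale = 2 (bitWidth keeps its 128 default).

```swift
// Sketch: declare the Decimal *type* for values like 123.45.
// The digits themselves live in the array data, not in this table.
var fbb = FlatBufferBuilder(initialSize: 64)
let decimalType = org_apache_arrow_flatbuf_Decimal.createDecimal(
  &fbb, precision: 5, scale: 2)  // bitWidth defaults to 128
```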
-
-/// Date is either a 32-bit or 64-bit signed integer type representing an
-/// elapsed time since UNIX epoch (1970-01-01), stored in either of two units:
-///
-/// * Milliseconds (64 bits) indicating UNIX time elapsed since the epoch (no
-///   leap seconds), where the values are evenly divisible by 86400000
-/// * Days (32 bits) since the UNIX epoch
-public struct org_apache_arrow_flatbuf_Date: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsDate(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Date { return org_apache_arrow_flatbuf_Date(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case unit = 4
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var unit: org_apache_arrow_flatbuf_DateUnit { let o = _accessor.offset(VTOFFSET.unit.v); return o == 0 ? .millisecond : org_apache_arrow_flatbuf_DateUnit(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .millisecond }
-  public static func startDate(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 1) }
-  public static func add(unit: org_apache_arrow_flatbuf_DateUnit, _ fbb: inout FlatBufferBuilder) { fbb.add(element: unit.rawValue, def: 1, at: VTOFFSET.unit.p) }
-  public static func endDate(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createDate(
-    _ fbb: inout FlatBufferBuilder,
-    unit: org_apache_arrow_flatbuf_DateUnit = .millisecond
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Date.startDate(&fbb)
-    org_apache_arrow_flatbuf_Date.add(unit: unit, &fbb)
-    return org_apache_arrow_flatbuf_Date.endDate(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.unit.p, fieldName: "unit", required: false, type: org_apache_arrow_flatbuf_DateUnit.self)
-    _v.finish()
-  }
-}
-
-/// Time is either a 32-bit or 64-bit signed integer type representing an
-/// elapsed time since midnight, stored in either of four units: seconds,
-/// milliseconds, microseconds or nanoseconds.
-///
-/// The integer `bitWidth` depends on the `unit` and must be one of the following:
-/// * SECOND and MILLISECOND: 32 bits
-/// * MICROSECOND and NANOSECOND: 64 bits
-///
-/// The allowed values are between 0 (inclusive) and 86400 (=24*60*60) seconds
-/// (exclusive), adjusted for the time unit (for example, up to 86400000
-/// exclusive for the MILLISECOND unit).
-/// This definition doesn't allow for leap seconds. Time values from
-/// measurements with leap seconds will need to be corrected when ingesting
-/// into Arrow (for example by replacing the value 86400 with 86399).
-public struct org_apache_arrow_flatbuf_Time: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsTime(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Time { return org_apache_arrow_flatbuf_Time(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case unit = 4
-    case bitWidth = 6
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var unit: org_apache_arrow_flatbuf_TimeUnit { let o = _accessor.offset(VTOFFSET.unit.v); return o == 0 ? .millisecond : org_apache_arrow_flatbuf_TimeUnit(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .millisecond }
-  public var bitWidth: Int32 { let o = _accessor.offset(VTOFFSET.bitWidth.v); return o == 0 ? 32 : _accessor.readBuffer(of: Int32.self, at: o) }
-  public static func startTime(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 2) }
-  public static func add(unit: org_apache_arrow_flatbuf_TimeUnit, _ fbb: inout FlatBufferBuilder) { fbb.add(element: unit.rawValue, def: 1, at: VTOFFSET.unit.p) }
-  public static func add(bitWidth: Int32, _ fbb: inout FlatBufferBuilder) { fbb.add(element: bitWidth, def: 32, at: VTOFFSET.bitWidth.p) }
-  public static func endTime(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createTime(
-    _ fbb: inout FlatBufferBuilder,
-    unit: org_apache_arrow_flatbuf_TimeUnit = .millisecond,
-    bitWidth: Int32 = 32
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Time.startTime(&fbb)
-    org_apache_arrow_flatbuf_Time.add(unit: unit, &fbb)
-    org_apache_arrow_flatbuf_Time.add(bitWidth: bitWidth, &fbb)
-    return org_apache_arrow_flatbuf_Time.endTime(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.unit.p, fieldName: "unit", required: false, type: org_apache_arrow_flatbuf_TimeUnit.self)
-    try _v.visit(field: VTOFFSET.bitWidth.p, fieldName: "bitWidth", required: false, type: Int32.self)
-    _v.finish()
-  }
-}
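A quick sketch of the unit/bitWidth pairing rule above, assuming the usual camel-case TimeUnit cases (`.second`, `.nanosecond`) from this generated file:

```swift
// Sketch: SECOND/MILLISECOND must pair with 32 bits,
// MICROSECOND/NANOSECOND with 64 bits.
var fbb = FlatBufferBuilder(initialSize: 64)
let time32 = org_apache_arrow_flatbuf_Time.createTime(&fbb, unit: .second, bitWidth: 32)
let time64 = org_apache_arrow_flatbuf_Time.createTime(&fbb, unit: .nanosecond, bitWidth: 64)
```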
-/// Timestamp is a 64-bit signed integer representing an elapsed time since a
-/// fixed epoch, stored in either of four units: seconds, milliseconds,
-/// microseconds or nanoseconds, and is optionally annotated with a timezone.
-///
-/// Timestamp values do not include any leap seconds (in other words, all
-/// days are considered 86400 seconds long).
-///
-/// Timestamps with a non-empty timezone
-/// ------------------------------------
-///
-/// If a Timestamp column has a non-empty timezone value, its epoch is
-/// 1970-01-01 00:00:00 (January 1st 1970, midnight) in the *UTC* timezone
-/// (the Unix epoch), regardless of the Timestamp's own timezone.
-///
-/// Therefore, timestamp values with a non-empty timezone correspond to
-/// physical points in time together with some additional information about
-/// how the data was obtained and/or how to display it (the timezone).
-///
-/// For example, the timestamp value 0 with the timezone string "Europe/Paris"
-/// corresponds to "January 1st 1970, 00h00" in the UTC timezone, but the
-/// application may prefer to display it as "January 1st 1970, 01h00" in
-/// the Europe/Paris timezone (which is the same physical point in time).
-///
-/// One consequence is that timestamp values with a non-empty timezone
-/// can be compared and ordered directly, since they all share the same
-/// well-known point of reference (the Unix epoch).
-///
-/// Timestamps with an unset / empty timezone
-/// -----------------------------------------
-///
-/// If a Timestamp column has no timezone value, its epoch is
-/// 1970-01-01 00:00:00 (January 1st 1970, midnight) in an *unknown* timezone.
-///
-/// Therefore, timestamp values without a timezone cannot be meaningfully
-/// interpreted as physical points in time, but only as calendar / clock
-/// indications ("wall clock time") in an unspecified timezone.
-///
-/// For example, the timestamp value 0 with an empty timezone string
-/// corresponds to "January 1st 1970, 00h00" in an unknown timezone: there
-/// is not enough information to interpret it as a well-defined physical
-/// point in time.
-///
-/// One consequence is that timestamp values without a timezone cannot
-/// be reliably compared or ordered, since they may have different points of
-/// reference. In particular, it is *not* possible to interpret an unset
-/// or empty timezone as the same as "UTC".
-///
-/// Conversion between timezones
-/// ----------------------------
-///
-/// If a Timestamp column has a non-empty timezone, changing the timezone
-/// to a different non-empty value is a metadata-only operation:
-/// the timestamp values need not change as their point of reference remains
-/// the same (the Unix epoch).
-///
-/// However, if a Timestamp column has no timezone value, changing it to a
-/// non-empty value requires thinking about the desired semantics.
-/// One possibility is to assume that the original timestamp values are
-/// relative to the epoch of the timezone being set; timestamp values should
-/// then be adjusted to the Unix epoch (for example, changing the timezone from
-/// empty to "Europe/Paris" would require converting the timestamp values
-/// from "Europe/Paris" to "UTC", which seems counter-intuitive but is
-/// nevertheless correct).
-///
-/// Guidelines for encoding data from external libraries
-/// ----------------------------------------------------
-///
-/// Date & time libraries often have multiple different data types for temporal
-/// data. In order to ease interoperability between different implementations the
-/// Arrow project has some recommendations for encoding these types into a Timestamp
-/// column.
-///
-/// An "instant" represents a physical point in time that has no relevant timezone
-/// (for example, astronomical data). To encode an instant, use a Timestamp with
-/// the timezone string set to "UTC", and make sure the Timestamp values
-/// are relative to the UTC epoch (January 1st 1970, midnight).
-///
-/// A "zoned date-time" represents a physical point in time annotated with an
-/// informative timezone (for example, the timezone in which the data was
-/// recorded). To encode a zoned date-time, use a Timestamp with the timezone
-/// string set to the name of the timezone, and make sure the Timestamp values
-/// are relative to the UTC epoch (January 1st 1970, midnight).
-///
-/// (There is some ambiguity between an instant and a zoned date-time with the
-/// UTC timezone. Both of these are stored the same in Arrow. Typically,
-/// this distinction does not matter. If it does, then an application should
-/// use custom metadata or an extension type to distinguish between the two cases.)
-///
-/// An "offset date-time" represents a physical point in time combined with an
-/// explicit offset from UTC. To encode an offset date-time, use a Timestamp
-/// with the timezone string set to the numeric timezone offset string
-/// (e.g. "+03:00"), and make sure the Timestamp values are relative to
-/// the UTC epoch (January 1st 1970, midnight).
-///
-/// A "naive date-time" (also called "local date-time" in some libraries)
-/// represents a wall clock time combined with a calendar date, but with
-/// no indication of how to map this information to a physical point in time.
-/// Naive date-times must be handled with care because of this missing
-/// information, and also because daylight saving time (DST) may make
-/// some values ambiguous or nonexistent. A naive date-time may be
-/// stored as a struct with Date and Time fields. However, it may also be
-/// encoded into a Timestamp column with an empty timezone. The timestamp
-/// values should be computed "as if" the timezone of the date-time values
-/// was UTC; for example, the naive date-time "January 1st 1970, 00h00" would
-/// be encoded as timestamp value 0.
-public struct org_apache_arrow_flatbuf_Timestamp: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsTimestamp(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Timestamp { return org_apache_arrow_flatbuf_Timestamp(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case unit = 4
-    case timezone = 6
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var unit: org_apache_arrow_flatbuf_TimeUnit { let o = _accessor.offset(VTOFFSET.unit.v); return o == 0 ? .second : org_apache_arrow_flatbuf_TimeUnit(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .second }
-  /// The timezone is an optional string indicating the name of a timezone,
-  /// one of:
-  ///
-  /// * As used in the Olson timezone database (the "tz database" or
-  ///   "tzdata"), such as "America/New_York".
-  /// * An absolute timezone offset of the form "+XX:XX" or "-XX:XX",
-  ///   such as "+07:30".
-  ///
-  /// Whether a timezone string is present indicates different semantics about
-  /// the data (see above).
-  public var timezone: String? { let o = _accessor.offset(VTOFFSET.timezone.v); return o == 0 ? nil : _accessor.string(at: o) }
-  public var timezoneSegmentArray: [UInt8]? { return _accessor.getVector(at: VTOFFSET.timezone.v) }
-  public static func startTimestamp(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 2) }
-  public static func add(unit: org_apache_arrow_flatbuf_TimeUnit, _ fbb: inout FlatBufferBuilder) { fbb.add(element: unit.rawValue, def: 0, at: VTOFFSET.unit.p) }
-  public static func add(timezone: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: timezone, at: VTOFFSET.timezone.p) }
-  public static func endTimestamp(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createTimestamp(
-    _ fbb: inout FlatBufferBuilder,
-    unit: org_apache_arrow_flatbuf_TimeUnit = .second,
-    timezoneOffset timezone: Offset = Offset()
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Timestamp.startTimestamp(&fbb)
-    org_apache_arrow_flatbuf_Timestamp.add(unit: unit, &fbb)
-    org_apache_arrow_flatbuf_Timestamp.add(timezone: timezone, &fbb)
-    return org_apache_arrow_flatbuf_Timestamp.endTimestamp(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.unit.p, fieldName: "unit", required: false, type: org_apache_arrow_flatbuf_TimeUnit.self)
-    try _v.visit(field: VTOFFSET.timezone.p, fieldName: "timezone", required: false, type: ForwardOffset<String>.self)
-    _v.finish()
-  }
-}
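Following the "instant" guideline above, a minimal sketch of declaring such a column type (the microsecond unit is an arbitrary illustrative choice):

```swift
// Sketch: an "instant" Timestamp type — timezone "UTC",
// values measured from the Unix epoch.
var fbb = FlatBufferBuilder(initialSize: 64)
let tz = fbb.create(string: "UTC")
let instant = org_apache_arrow_flatbuf_Timestamp.createTimestamp(
  &fbb, unit: .microsecond, timezoneOffset: tz)
```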
-
-public struct org_apache_arrow_flatbuf_Interval: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsInterval(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Interval { return org_apache_arrow_flatbuf_Interval(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case unit = 4
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var unit: org_apache_arrow_flatbuf_IntervalUnit { let o = _accessor.offset(VTOFFSET.unit.v); return o == 0 ? .yearMonth : org_apache_arrow_flatbuf_IntervalUnit(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .yearMonth }
-  public static func startInterval(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 1) }
-  public static func add(unit: org_apache_arrow_flatbuf_IntervalUnit, _ fbb: inout FlatBufferBuilder) { fbb.add(element: unit.rawValue, def: 0, at: VTOFFSET.unit.p) }
-  public static func endInterval(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createInterval(
-    _ fbb: inout FlatBufferBuilder,
-    unit: org_apache_arrow_flatbuf_IntervalUnit = .yearMonth
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Interval.startInterval(&fbb)
-    org_apache_arrow_flatbuf_Interval.add(unit: unit, &fbb)
-    return org_apache_arrow_flatbuf_Interval.endInterval(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.unit.p, fieldName: "unit", required: false, type: org_apache_arrow_flatbuf_IntervalUnit.self)
-    _v.finish()
-  }
-}
-
-public struct org_apache_arrow_flatbuf_Duration: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsDuration(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Duration { return org_apache_arrow_flatbuf_Duration(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case unit = 4
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var unit: org_apache_arrow_flatbuf_TimeUnit { let o = _accessor.offset(VTOFFSET.unit.v); return o == 0 ? .millisecond : org_apache_arrow_flatbuf_TimeUnit(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .millisecond }
-  public static func startDuration(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 1) }
-  public static func add(unit: org_apache_arrow_flatbuf_TimeUnit, _ fbb: inout FlatBufferBuilder) { fbb.add(element: unit.rawValue, def: 1, at: VTOFFSET.unit.p) }
-  public static func endDuration(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createDuration(
-    _ fbb: inout FlatBufferBuilder,
-    unit: org_apache_arrow_flatbuf_TimeUnit = .millisecond
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Duration.startDuration(&fbb)
-    org_apache_arrow_flatbuf_Duration.add(unit: unit, &fbb)
-    return org_apache_arrow_flatbuf_Duration.endDuration(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.unit.p, fieldName: "unit", required: false, type: org_apache_arrow_flatbuf_TimeUnit.self)
-    _v.finish()
-  }
-}
-
-/// ----------------------------------------------------------------------
-/// user defined key value pairs to add custom metadata to arrow
-/// key namespacing is the responsibility of the user
-public struct org_apache_arrow_flatbuf_KeyValue: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsKeyValue(bb: ByteBuffer) -> org_apache_arrow_flatbuf_KeyValue { return org_apache_arrow_flatbuf_KeyValue(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case key = 4
-    case value = 6
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  public var key: String? { let o = _accessor.offset(VTOFFSET.key.v); return o == 0 ? nil : _accessor.string(at: o) }
-  public var keySegmentArray: [UInt8]? { return _accessor.getVector(at: VTOFFSET.key.v) }
-  public var value: String? { let o = _accessor.offset(VTOFFSET.value.v); return o == 0 ? nil : _accessor.string(at: o) }
-  public var valueSegmentArray: [UInt8]? { return _accessor.getVector(at: VTOFFSET.value.v) }
-  public static func startKeyValue(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 2) }
-  public static func add(key: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: key, at: VTOFFSET.key.p) }
-  public static func add(value: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: value, at: VTOFFSET.value.p) }
-  public static func endKeyValue(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createKeyValue(
-    _ fbb: inout FlatBufferBuilder,
-    keyOffset key: Offset = Offset(),
-    valueOffset value: Offset = Offset()
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_KeyValue.startKeyValue(&fbb)
-    org_apache_arrow_flatbuf_KeyValue.add(key: key, &fbb)
-    org_apache_arrow_flatbuf_KeyValue.add(value: value, &fbb)
-    return org_apache_arrow_flatbuf_KeyValue.endKeyValue(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.key.p, fieldName: "key", required: false, type: ForwardOffset<String>.self)
-    try _v.visit(field: VTOFFSET.value.p, fieldName: "value", required: false, type: ForwardOffset<String>.self)
-    _v.finish()
-  }
-}
-
-public struct org_apache_arrow_flatbuf_DictionaryEncoding: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsDictionaryEncoding(bb: ByteBuffer) -> org_apache_arrow_flatbuf_DictionaryEncoding { return org_apache_arrow_flatbuf_DictionaryEncoding(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case id = 4
-    case indexType = 6
-    case isOrdered = 8
-    case dictionaryKind = 10
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  /// The known dictionary id in the application where this data is used. In
-  /// the file or streaming formats, the dictionary ids are found in the
-  /// DictionaryBatch messages
-  public var id: Int64 { let o = _accessor.offset(VTOFFSET.id.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int64.self, at: o) }
-  /// The dictionary indices are constrained to be non-negative integers. If
-  /// this field is null, the indices must be signed int32. To maximize
-  /// cross-language compatibility and performance, implementations are
-  /// recommended to prefer signed integer types over unsigned integer types
-  /// and to avoid uint64 indices unless they are required by an application.
-  public var indexType: org_apache_arrow_flatbuf_Int? { let o = _accessor.offset(VTOFFSET.indexType.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Int(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) }
-  /// By default, dictionaries are not ordered, or the order does not have
-  /// semantic meaning. In some statistical applications, dictionary-encoding
-  /// is used to represent ordered categorical data, and we provide a way to
-  /// preserve that metadata here
-  public var isOrdered: Bool { let o = _accessor.offset(VTOFFSET.isOrdered.v); return o == 0 ? false : 0 != _accessor.readBuffer(of: Byte.self, at: o) }
-  public var dictionaryKind: org_apache_arrow_flatbuf_DictionaryKind { let o = _accessor.offset(VTOFFSET.dictionaryKind.v); return o == 0 ? .densearray : org_apache_arrow_flatbuf_DictionaryKind(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .densearray }
-  public static func startDictionaryEncoding(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 4) }
-  public static func add(id: Int64, _ fbb: inout FlatBufferBuilder) { fbb.add(element: id, def: 0, at: VTOFFSET.id.p) }
-  public static func add(indexType: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indexType, at: VTOFFSET.indexType.p) }
-  public static func add(isOrdered: Bool, _ fbb: inout FlatBufferBuilder) { fbb.add(element: isOrdered, def: false,
-   at: VTOFFSET.isOrdered.p) }
-  public static func add(dictionaryKind: org_apache_arrow_flatbuf_DictionaryKind, _ fbb: inout FlatBufferBuilder) { fbb.add(element: dictionaryKind.rawValue, def: 0, at: VTOFFSET.dictionaryKind.p) }
-  public static func endDictionaryEncoding(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createDictionaryEncoding(
-    _ fbb: inout FlatBufferBuilder,
-    id: Int64 = 0,
-    indexTypeOffset indexType: Offset = Offset(),
-    isOrdered: Bool = false,
-    dictionaryKind: org_apache_arrow_flatbuf_DictionaryKind = .densearray
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_DictionaryEncoding.startDictionaryEncoding(&fbb)
-    org_apache_arrow_flatbuf_DictionaryEncoding.add(id: id, &fbb)
-    org_apache_arrow_flatbuf_DictionaryEncoding.add(indexType: indexType, &fbb)
-    org_apache_arrow_flatbuf_DictionaryEncoding.add(isOrdered: isOrdered, &fbb)
-    org_apache_arrow_flatbuf_DictionaryEncoding.add(dictionaryKind: dictionaryKind, &fbb)
-    return org_apache_arrow_flatbuf_DictionaryEncoding.endDictionaryEncoding(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.id.p, fieldName: "id", required: false, type: Int64.self)
-    try _v.visit(field: VTOFFSET.indexType.p, fieldName: "indexType", required: false, type: ForwardOffset<org_apache_arrow_flatbuf_Int>.self)
-    try _v.visit(field: VTOFFSET.isOrdered.p, fieldName: "isOrdered", required: false, type: Bool.self)
-    try _v.visit(field: VTOFFSET.dictionaryKind.p, fieldName: "dictionaryKind", required: false, type: org_apache_arrow_flatbuf_DictionaryKind.self)
-    _v.finish()
-  }
-}
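A sketch of these fields taken together: an ordered dictionary with signed 32-bit indices, id 0 (all values illustrative).

```swift
// Sketch: dictionary id 0 with signed int32 indices, marked ordered
// so the category order is preserved as semantic metadata.
var fbb = FlatBufferBuilder(initialSize: 64)
let indexType = org_apache_arrow_flatbuf_Int.createInt(&fbb, bitWidth: 32, isSigned: true)
let encoding = org_apache_arrow_flatbuf_DictionaryEncoding.createDictionaryEncoding(
  &fbb, id: 0, indexTypeOffset: indexType, isOrdered: true)
```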
-
-/// ----------------------------------------------------------------------
-/// A field represents a named column in a record / row batch or child of a
-/// nested type.
-public struct org_apache_arrow_flatbuf_Field: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsField(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Field { return org_apache_arrow_flatbuf_Field(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case name = 4
-    case nullable = 6
-    case typeType = 8
-    case type = 10
-    case dictionary = 12
-    case children = 14
-    case customMetadata = 16
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  /// Name is not required (e.g. in a List)
-  public var name: String? { let o = _accessor.offset(VTOFFSET.name.v); return o == 0 ? nil : _accessor.string(at: o) }
-  public var nameSegmentArray: [UInt8]? { return _accessor.getVector(at: VTOFFSET.name.v) }
-  /// Whether or not this field can contain nulls. Should be true in general.
-  public var nullable: Bool { let o = _accessor.offset(VTOFFSET.nullable.v); return o == 0 ? false : 0 != _accessor.readBuffer(of: Byte.self, at: o) }
-  public var typeType: org_apache_arrow_flatbuf_Type_ { let o = _accessor.offset(VTOFFSET.typeType.v); return o == 0 ? .none_ : org_apache_arrow_flatbuf_Type_(rawValue: _accessor.readBuffer(of: UInt8.self, at: o)) ?? .none_ }
-  /// This is the type of the decoded value if the field is dictionary encoded.
-  public func type<T: FlatbuffersInitializable>(type: T.Type) -> T? { let o = _accessor.offset(VTOFFSET.type.v); return o == 0 ? nil : _accessor.union(o) }
-  /// Present only if the field is dictionary encoded.
-  public var dictionary: org_apache_arrow_flatbuf_DictionaryEncoding? { let o = _accessor.offset(VTOFFSET.dictionary.v); return o == 0 ? nil : org_apache_arrow_flatbuf_DictionaryEncoding(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) }
-  /// children apply only to nested data types like Struct, List and Union. For
-  /// primitive types children will have length 0.
-  public var hasChildren: Bool { let o = _accessor.offset(VTOFFSET.children.v); return o == 0 ? false : true }
-  public var childrenCount: Int32 { let o = _accessor.offset(VTOFFSET.children.v); return o == 0 ? 0 : _accessor.vector(count: o) }
-  public func children(at index: Int32) -> org_apache_arrow_flatbuf_Field? { let o = _accessor.offset(VTOFFSET.children.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Field(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) }
-  /// User-defined metadata
-  public var hasCustomMetadata: Bool { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? false : true }
-  public var customMetadataCount: Int32 { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? 0 : _accessor.vector(count: o) }
-  public func customMetadata(at index: Int32) -> org_apache_arrow_flatbuf_KeyValue? { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? nil : org_apache_arrow_flatbuf_KeyValue(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) }
-  public static func startField(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 7) }
-  public static func add(name: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: name, at: VTOFFSET.name.p) }
-  public static func add(nullable: Bool, _ fbb: inout FlatBufferBuilder) { fbb.add(element: nullable, def: false,
-   at: VTOFFSET.nullable.p) }
-  public static func add(typeType: org_apache_arrow_flatbuf_Type_, _ fbb: inout FlatBufferBuilder) { fbb.add(element: typeType.rawValue, def: 0, at: VTOFFSET.typeType.p) }
-  public static func add(type: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: type, at: VTOFFSET.type.p) }
-  public static func add(dictionary: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: dictionary, at: VTOFFSET.dictionary.p) }
-  public static func addVectorOf(children: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: children, at: VTOFFSET.children.p) }
-  public static func addVectorOf(customMetadata: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: customMetadata, at: VTOFFSET.customMetadata.p) }
-  public static func endField(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createField(
-    _ fbb: inout FlatBufferBuilder,
-    nameOffset name: Offset = Offset(),
-    nullable: Bool = false,
-    typeType: org_apache_arrow_flatbuf_Type_ = .none_,
-    typeOffset type: Offset = Offset(),
-    dictionaryOffset dictionary: Offset = Offset(),
-    childrenVectorOffset children: Offset = Offset(),
-    customMetadataVectorOffset customMetadata: Offset = Offset()
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Field.startField(&fbb)
-    org_apache_arrow_flatbuf_Field.add(name: name, &fbb)
-    org_apache_arrow_flatbuf_Field.add(nullable: nullable, &fbb)
-    org_apache_arrow_flatbuf_Field.add(typeType: typeType, &fbb)
-    org_apache_arrow_flatbuf_Field.add(type: type, &fbb)
-    org_apache_arrow_flatbuf_Field.add(dictionary: dictionary, &fbb)
-    org_apache_arrow_flatbuf_Field.addVectorOf(children: children, &fbb)
-    org_apache_arrow_flatbuf_Field.addVectorOf(customMetadata: customMetadata, &fbb)
-    return org_apache_arrow_flatbuf_Field.endField(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.name.p, fieldName: "name", required: false, type: ForwardOffset<String>.self)
-    try _v.visit(field: VTOFFSET.nullable.p, fieldName: "nullable", required: false, type: Bool.self)
-    try _v.visit(unionKey: VTOFFSET.typeType.p, unionField: VTOFFSET.type.p, unionKeyName: "typeType", fieldName: "type", required: false, completion: { (verifier, key: org_apache_arrow_flatbuf_Type_, pos) in
-      switch key {
-      case .none_:
-        break // NOTE - SWIFT doesnt support none
-      case .null:
-        try ForwardOffset<org_apache_arrow_flatbuf_Null>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Null.self)
-      case .int:
-        try ForwardOffset<org_apache_arrow_flatbuf_Int>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Int.self)
-      case .floatingpoint:
-        try ForwardOffset<org_apache_arrow_flatbuf_FloatingPoint>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FloatingPoint.self)
-      case .binary:
-        try ForwardOffset<org_apache_arrow_flatbuf_Binary>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Binary.self)
-      case .utf8:
-        try ForwardOffset<org_apache_arrow_flatbuf_Utf8>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Utf8.self)
-      case .bool:
-        try ForwardOffset<org_apache_arrow_flatbuf_Bool>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Bool.self)
-      case .decimal:
-        try ForwardOffset<org_apache_arrow_flatbuf_Decimal>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Decimal.self)
-      case .date:
-        try ForwardOffset<org_apache_arrow_flatbuf_Date>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Date.self)
-      case .time:
-        try ForwardOffset<org_apache_arrow_flatbuf_Time>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Time.self)
-      case .timestamp:
-        try ForwardOffset<org_apache_arrow_flatbuf_Timestamp>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Timestamp.self)
-      case .interval:
-        try ForwardOffset<org_apache_arrow_flatbuf_Interval>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Interval.self)
-      case .list:
-        try ForwardOffset<org_apache_arrow_flatbuf_List>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_List.self)
-      case .struct_:
-        try ForwardOffset<org_apache_arrow_flatbuf_Struct_>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Struct_.self)
-      case .union:
-        try ForwardOffset<org_apache_arrow_flatbuf_Union>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Union.self)
-      case .fixedsizebinary:
-        try ForwardOffset<org_apache_arrow_flatbuf_FixedSizeBinary>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FixedSizeBinary.self)
-      case .fixedsizelist:
-        try ForwardOffset<org_apache_arrow_flatbuf_FixedSizeList>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FixedSizeList.self)
-      case .map:
-        try ForwardOffset<org_apache_arrow_flatbuf_Map>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Map.self)
-      case .duration:
-        try ForwardOffset<org_apache_arrow_flatbuf_Duration>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Duration.self)
-      case .largebinary:
-        try ForwardOffset<org_apache_arrow_flatbuf_LargeBinary>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeBinary.self)
-      case .largeutf8:
-        try ForwardOffset<org_apache_arrow_flatbuf_LargeUtf8>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeUtf8.self)
-      case .largelist:
-        try ForwardOffset<org_apache_arrow_flatbuf_LargeList>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeList.self)
-      case .runendencoded:
-        try ForwardOffset<org_apache_arrow_flatbuf_RunEndEncoded>.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_RunEndEncoded.self)
-      }
-    })
-    try _v.visit(field: VTOFFSET.dictionary.p, fieldName: "dictionary", required: false, type: ForwardOffset<org_apache_arrow_flatbuf_DictionaryEncoding>.self)
-    try _v.visit(field: VTOFFSET.children.p, fieldName: "children", required: false, type: ForwardOffset<Vector<ForwardOffset<org_apache_arrow_flatbuf_Field>, org_apache_arrow_flatbuf_Field>>.self)
-    try _v.visit(field: VTOFFSET.customMetadata.p, fieldName: "customMetadata", required: false, type: ForwardOffset<Vector<ForwardOffset<org_apache_arrow_flatbuf_KeyValue>, org_apache_arrow_flatbuf_KeyValue>>.self)
-    _v.finish()
-  }
-}
-
-/// ----------------------------------------------------------------------
-/// A Schema describes the columns in a row batch
-public struct org_apache_arrow_flatbuf_Schema: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsSchema(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Schema { return org_apache_arrow_flatbuf_Schema(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case endianness = 4
-    case fields = 6
-    case customMetadata = 8
-    case features = 10
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  /// endianness of the buffer
-  /// it is Little Endian by default
-  /// if endianness doesn't match the underlying system then the vectors need to be converted
-  public var endianness: org_apache_arrow_flatbuf_Endianness { let o = _accessor.offset(VTOFFSET.endianness.v); return o == 0 ? .little : org_apache_arrow_flatbuf_Endianness(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .little }
-  public var hasFields: Bool { let o = _accessor.offset(VTOFFSET.fields.v); return o == 0 ? false : true }
-  public var fieldsCount: Int32 { let o = _accessor.offset(VTOFFSET.fields.v); return o == 0 ? 0 : _accessor.vector(count: o) }
-  public func fields(at index: Int32) -> org_apache_arrow_flatbuf_Field? { let o = _accessor.offset(VTOFFSET.fields.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Field(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) }
-  public var hasCustomMetadata: Bool { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? false : true }
-  public var customMetadataCount: Int32 { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? 0 : _accessor.vector(count: o) }
-  public func customMetadata(at index: Int32) -> org_apache_arrow_flatbuf_KeyValue? { let o = _accessor.offset(VTOFFSET.customMetadata.v); return o == 0 ? nil : org_apache_arrow_flatbuf_KeyValue(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) }
-  /// Features used in the stream/file.
-  public var hasFeatures: Bool { let o = _accessor.offset(VTOFFSET.features.v); return o == 0 ? false : true }
-  public var featuresCount: Int32 { let o = _accessor.offset(VTOFFSET.features.v); return o == 0 ? 0 : _accessor.vector(count: o) }
-  public func features(at index: Int32) -> org_apache_arrow_flatbuf_Feature? { let o = _accessor.offset(VTOFFSET.features.v); return o == 0 ? org_apache_arrow_flatbuf_Feature.unused : org_apache_arrow_flatbuf_Feature(rawValue: _accessor.directRead(of: Int64.self, offset: _accessor.vector(at: o) + index * 8)) }
-  public static func startSchema(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 4) }
-  public static func add(endianness: org_apache_arrow_flatbuf_Endianness, _ fbb: inout FlatBufferBuilder) { fbb.add(element: endianness.rawValue, def: 0, at: VTOFFSET.endianness.p) }
-  public static func addVectorOf(fields: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: fields, at: VTOFFSET.fields.p) }
-  public static func addVectorOf(customMetadata: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: customMetadata, at: VTOFFSET.customMetadata.p) }
-  public static func addVectorOf(features: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: features, at: VTOFFSET.features.p) }
-  public static func endSchema(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end }
-  public static func createSchema(
-    _ fbb: inout FlatBufferBuilder,
-    endianness: org_apache_arrow_flatbuf_Endianness = .little,
-    fieldsVectorOffset fields: Offset = Offset(),
-    customMetadataVectorOffset customMetadata: Offset = Offset(),
-    featuresVectorOffset features: Offset = Offset()
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_Schema.startSchema(&fbb)
-    org_apache_arrow_flatbuf_Schema.add(endianness: endianness, &fbb)
-    org_apache_arrow_flatbuf_Schema.addVectorOf(fields: fields, &fbb)
-    org_apache_arrow_flatbuf_Schema.addVectorOf(customMetadata: customMetadata, &fbb)
-    org_apache_arrow_flatbuf_Schema.addVectorOf(features: features, &fbb)
-    return org_apache_arrow_flatbuf_Schema.endSchema(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.endianness.p, fieldName: "endianness", required: false, type: org_apache_arrow_flatbuf_Endianness.self)
-    try _v.visit(field: VTOFFSET.fields.p, fieldName: "fields", required: false, type: ForwardOffset<Vector<ForwardOffset<org_apache_arrow_flatbuf_Field>, org_apache_arrow_flatbuf_Field>>.self)
-    try _v.visit(field: VTOFFSET.customMetadata.p, fieldName: "customMetadata", required: false, type: ForwardOffset<Vector<ForwardOffset<org_apache_arrow_flatbuf_KeyValue>, org_apache_arrow_flatbuf_KeyValue>>.self)
-    try _v.visit(field: VTOFFSET.features.p, fieldName: "features", required: false, type: ForwardOffset<Vector<org_apache_arrow_flatbuf_Feature, org_apache_arrow_flatbuf_Feature>>.self)
-    _v.finish()
-  }
-}
-
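Taken together, the generated builders above compose bottom-up: leaf type tables first, then the Field, then the vector of fields, then the Schema. A hedged sketch of a one-column schema (the column name "id" is illustrative):

```swift
import FlatBuffers

// Sketch: serialize a Schema with a single nullable int32 column "id".
var fbb = FlatBufferBuilder(initialSize: 256)
let name = fbb.create(string: "id")
let int32 = org_apache_arrow_flatbuf_Int.createInt(&fbb, bitWidth: 32, isSigned: true)
let field = org_apache_arrow_flatbuf_Field.createField(
  &fbb, nameOffset: name, nullable: true, typeType: .int, typeOffset: int32)
let fields = fbb.createVector(ofOffsets: [field])
let schema = org_apache_arrow_flatbuf_Schema.createSchema(&fbb, fieldsVectorOffset: fields)
fbb.finish(offset: schema)
// fbb.sizedByteArray now holds the finished flatbuffer.
```

diff --git a/swift/Arrow/Sources/Arrow/SparseTensor_generated.swift b/swift/Arrow/Sources/Arrow/SparseTensor_generated.swift
deleted file mode 100644
index a2dfbdb4825..00000000000
--- a/swift/Arrow/Sources/Arrow/SparseTensor_generated.swift
+++ /dev/null
@@ -1,535 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-// automatically generated by the FlatBuffers compiler, do not modify
-// swiftlint:disable all
-// swiftformat:disable all
-
-import FlatBuffers
-
-public enum org_apache_arrow_flatbuf_SparseMatrixCompressedAxis: Int16, Enum, Verifiable {
-  public typealias T = Int16
-  public static var byteSize: Int { return MemoryLayout<Int16>.size }
-  public var value: Int16 { return self.rawValue }
-  case row = 0
-  case column = 1
-
-  public static var max: org_apache_arrow_flatbuf_SparseMatrixCompressedAxis { return .column }
-  public static var min: org_apache_arrow_flatbuf_SparseMatrixCompressedAxis { return .row }
-}
-
-
-public enum org_apache_arrow_flatbuf_SparseTensorIndex: UInt8, UnionEnum {
-  public typealias T = UInt8
-
-  public init?(value: T) {
-    self.init(rawValue: value)
-  }
-
-  public static var byteSize: Int { return MemoryLayout<UInt8>.size }
-  public var value: UInt8 { return self.rawValue }
-  case none_ = 0
-  case sparsetensorindexcoo = 1
-  case sparsematrixindexcsx = 2
-  case sparsetensorindexcsf = 3
-
-  public static var max: org_apache_arrow_flatbuf_SparseTensorIndex { return .sparsetensorindexcsf }
-  public static var min: org_apache_arrow_flatbuf_SparseTensorIndex { return .none_ }
-}
-
-
-/// ----------------------------------------------------------------------
-/// EXPERIMENTAL: Data structures for sparse tensors
-/// Coordinate (COO) format of sparse tensor index.
-///
-/// COO's index list is represented as an NxM matrix,
-/// where N is the number of non-zero values,
-/// and M is the number of dimensions of a sparse tensor.
-///
-/// indicesBuffer stores the location and size of the data of this indices
-/// matrix. The value type and the stride of the indices matrix is
-/// specified in indicesType and indicesStrides fields.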
-///
-/// For example, let X be a 2x3x4x5 tensor, and it has the following
-/// 6 non-zero values:
-/// ```text
-///   X[0, 1, 2, 0] := 1
-///   X[1, 1, 2, 3] := 2
-///   X[0, 2, 1, 0] := 3
-///   X[0, 1, 3, 0] := 4
-///   X[0, 1, 2, 1] := 5
-///   X[1, 2, 0, 4] := 6
-/// ```
-/// In COO format, the index matrix of X is the following 4x6 matrix:
-/// ```text
-///   [[0, 0, 0, 0, 1, 1],
-///    [1, 1, 1, 2, 1, 2],
-///    [2, 2, 3, 1, 2, 0],
-///    [0, 1, 0, 0, 3, 4]]
-/// ```
-/// When isCanonical is true, the indices are sorted in lexicographical order
-/// (row-major order), and do not have duplicated entries. Otherwise,
-/// the indices may not be sorted, or may have duplicated entries.
-public struct org_apache_arrow_flatbuf_SparseTensorIndexCOO: FlatBufferObject, Verifiable {
-
-  static func validateVersion() { FlatBuffersVersion_23_1_4() }
-  public var __buffer: ByteBuffer! { return _accessor.bb }
-  private var _accessor: Table
-
-  public static func getRootAsSparseTensorIndexCOO(bb: ByteBuffer) -> org_apache_arrow_flatbuf_SparseTensorIndexCOO { return org_apache_arrow_flatbuf_SparseTensorIndexCOO(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) }
-
-  private init(_ t: Table) { _accessor = t }
-  public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) }
-
-  private enum VTOFFSET: VOffset {
-    case indicesType = 4
-    case indicesStrides = 6
-    case indicesBuffer = 8
-    case isCanonical = 10
-    var v: Int32 { Int32(self.rawValue) }
-    var p: VOffset { self.rawValue }
-  }
-
-  /// The type of values in indicesBuffer
-  public var indicesType: org_apache_arrow_flatbuf_Int! { let o = _accessor.offset(VTOFFSET.indicesType.v); return org_apache_arrow_flatbuf_Int(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) }
-  /// Non-negative byte offsets to advance one value cell along each dimension
-  /// If omitted, default to row-major order (C-like).
-  public var hasIndicesStrides: Bool { let o = _accessor.offset(VTOFFSET.indicesStrides.v); return o == 0 ? false : true }
-  public var indicesStridesCount: Int32 { let o = _accessor.offset(VTOFFSET.indicesStrides.v); return o == 0 ? 0 : _accessor.vector(count: o) }
-  public func indicesStrides(at index: Int32) -> Int64 { let o = _accessor.offset(VTOFFSET.indicesStrides.v); return o == 0 ? 0 : _accessor.directRead(of: Int64.self, offset: _accessor.vector(at: o) + index * 8) }
-  public var indicesStrides: [Int64] { return _accessor.getVector(at: VTOFFSET.indicesStrides.v) ?? [] }
-  /// The location and size of the indices matrix's data
-  public var indicesBuffer: org_apache_arrow_flatbuf_Buffer! { let o = _accessor.offset(VTOFFSET.indicesBuffer.v); return _accessor.readBuffer(of: org_apache_arrow_flatbuf_Buffer.self, at: o) }
-  public var mutableIndicesBuffer: org_apache_arrow_flatbuf_Buffer_Mutable! { let o = _accessor.offset(VTOFFSET.indicesBuffer.v); return org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: o + _accessor.position) }
-  /// This flag is true if and only if the indices matrix is sorted in
-  /// row-major order, and does not have duplicated entries.
-  /// This sort order is the same as that of Tensorflow's SparseTensor,
-  /// but it is inverse order of SciPy's canonical coo_matrix
-  /// (SciPy employs column-major order for its coo_matrix).
-  public var isCanonical: Bool { let o = _accessor.offset(VTOFFSET.isCanonical.v); return o == 0 ? false : 0 != _accessor.readBuffer(of: Byte.self, at: o) }
-  public static func startSparseTensorIndexCOO(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 4) }
-  public static func add(indicesType: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indicesType, at: VTOFFSET.indicesType.p) }
-  public static func addVectorOf(indicesStrides: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indicesStrides, at: VTOFFSET.indicesStrides.p) }
-  public static func add(indicesBuffer: org_apache_arrow_flatbuf_Buffer?, _ fbb: inout FlatBufferBuilder) { guard let indicesBuffer = indicesBuffer else { return }; fbb.create(struct: indicesBuffer, position: VTOFFSET.indicesBuffer.p) }
-  public static func add(isCanonical: Bool, _ fbb: inout FlatBufferBuilder) { fbb.add(element: isCanonical, def: false,
-   at: VTOFFSET.isCanonical.p) }
-  public static func endSparseTensorIndexCOO(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); fbb.require(table: end, fields: [4, 8]); return end }
-  public static func createSparseTensorIndexCOO(
-    _ fbb: inout FlatBufferBuilder,
-    indicesTypeOffset indicesType: Offset,
-    indicesStridesVectorOffset indicesStrides: Offset = Offset(),
-    indicesBuffer: org_apache_arrow_flatbuf_Buffer,
-    isCanonical: Bool = false
-  ) -> Offset {
-    let __start = org_apache_arrow_flatbuf_SparseTensorIndexCOO.startSparseTensorIndexCOO(&fbb)
-    org_apache_arrow_flatbuf_SparseTensorIndexCOO.add(indicesType: indicesType, &fbb)
-    org_apache_arrow_flatbuf_SparseTensorIndexCOO.addVectorOf(indicesStrides: indicesStrides, &fbb)
-    org_apache_arrow_flatbuf_SparseTensorIndexCOO.add(indicesBuffer: indicesBuffer, &fbb)
-    org_apache_arrow_flatbuf_SparseTensorIndexCOO.add(isCanonical: isCanonical, &fbb)
-    return org_apache_arrow_flatbuf_SparseTensorIndexCOO.endSparseTensorIndexCOO(&fbb, start: __start)
-  }
-
-  public static func verify<T>(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable {
-    var _v = try verifier.visitTable(at: position)
-    try _v.visit(field: VTOFFSET.indicesType.p, fieldName: "indicesType", required: true, type: ForwardOffset<org_apache_arrow_flatbuf_Int>.self)
-    try _v.visit(field: VTOFFSET.indicesStrides.p, fieldName: "indicesStrides", required: false, type: ForwardOffset<Vector<Int64, Int64>>.self)
-    try _v.visit(field: VTOFFSET.indicesBuffer.p, fieldName: "indicesBuffer", required: true, type: org_apache_arrow_flatbuf_Buffer.self)
-    try _v.visit(field: VTOFFSET.isCanonical.p, fieldName: "isCanonical", required: false, type: Bool.self)
-    _v.finish()
-  }
-}
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsSparseMatrixIndexCSX(bb: ByteBuffer) -> org_apache_arrow_flatbuf_SparseMatrixIndexCSX { return org_apache_arrow_flatbuf_SparseMatrixIndexCSX(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case compressedAxis = 4 - case indptrType = 6 - case indptrBuffer = 8 - case indicesType = 10 - case indicesBuffer = 12 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - /// Which axis, row or column, is compressed - public var compressedAxis: org_apache_arrow_flatbuf_SparseMatrixCompressedAxis { let o = _accessor.offset(VTOFFSET.compressedAxis.v); return o == 0 ? .row : org_apache_arrow_flatbuf_SparseMatrixCompressedAxis(rawValue: _accessor.readBuffer(of: Int16.self, at: o)) ?? .row } - /// The type of values in indptrBuffer - public var indptrType: org_apache_arrow_flatbuf_Int! { let o = _accessor.offset(VTOFFSET.indptrType.v); return org_apache_arrow_flatbuf_Int(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) } - /// indptrBuffer stores the location and size of indptr array that - /// represents the range of the rows. - /// The i-th row spans from `indptr[i]` to `indptr[i+1]` in the data. - /// The length of this array is 1 + (the number of rows), and the type - /// of index value is long. - /// - /// For example, let X be the following 6x4 matrix: - /// ```text - /// X := [[0, 1, 2, 0], - /// [0, 0, 3, 0], - /// [0, 4, 0, 5], - /// [0, 0, 0, 0], - /// [6, 0, 7, 8], - /// [0, 9, 0, 0]]. - /// ``` - /// The array of non-zero values in X is: - /// ```text - /// values(X) = [1, 2, 3, 4, 5, 6, 7, 8, 9]. - /// ``` - /// And the indptr of X is: - /// ```text - /// indptr(X) = [0, 2, 3, 5, 5, 8, 10]. - /// ``` - public var indptrBuffer: org_apache_arrow_flatbuf_Buffer! { let o = _accessor.offset(VTOFFSET.indptrBuffer.v); return _accessor.readBuffer(of: org_apache_arrow_flatbuf_Buffer.self, at: o) } - public var mutableIndptrBuffer: org_apache_arrow_flatbuf_Buffer_Mutable! { let o = _accessor.offset(VTOFFSET.indptrBuffer.v); return org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: o + _accessor.position) } - /// The type of values in indicesBuffer - public var indicesType: org_apache_arrow_flatbuf_Int! { let o = _accessor.offset(VTOFFSET.indicesType.v); return org_apache_arrow_flatbuf_Int(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) } - /// indicesBuffer stores the location and size of the array that - /// contains the column indices of the corresponding non-zero values. - /// The type of index value is long. - /// - /// For example, the indices of the above X is: - /// ```text - /// indices(X) = [1, 2, 2, 1, 3, 0, 2, 3, 1]. - /// ``` - /// Note that the indices are sorted in lexicographical order for each row. - public var indicesBuffer: org_apache_arrow_flatbuf_Buffer! { let o = _accessor.offset(VTOFFSET.indicesBuffer.v); return _accessor.readBuffer(of: org_apache_arrow_flatbuf_Buffer.self, at: o) } - public var mutableIndicesBuffer: org_apache_arrow_flatbuf_Buffer_Mutable! 
{ let o = _accessor.offset(VTOFFSET.indicesBuffer.v); return org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: o + _accessor.position) } - public static func startSparseMatrixIndexCSX(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 5) } - public static func add(compressedAxis: org_apache_arrow_flatbuf_SparseMatrixCompressedAxis, _ fbb: inout FlatBufferBuilder) { fbb.add(element: compressedAxis.rawValue, def: 0, at: VTOFFSET.compressedAxis.p) } - public static func add(indptrType: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indptrType, at: VTOFFSET.indptrType.p) } - public static func add(indptrBuffer: org_apache_arrow_flatbuf_Buffer?, _ fbb: inout FlatBufferBuilder) { guard let indptrBuffer = indptrBuffer else { return }; fbb.create(struct: indptrBuffer, position: VTOFFSET.indptrBuffer.p) } - public static func add(indicesType: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indicesType, at: VTOFFSET.indicesType.p) } - public static func add(indicesBuffer: org_apache_arrow_flatbuf_Buffer?, _ fbb: inout FlatBufferBuilder) { guard let indicesBuffer = indicesBuffer else { return }; fbb.create(struct: indicesBuffer, position: VTOFFSET.indicesBuffer.p) } - public static func endSparseMatrixIndexCSX(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); fbb.require(table: end, fields: [6, 8, 10, 12]); return end } - public static func createSparseMatrixIndexCSX( - _ fbb: inout FlatBufferBuilder, - compressedAxis: org_apache_arrow_flatbuf_SparseMatrixCompressedAxis = .row, - indptrTypeOffset indptrType: Offset, - indptrBuffer: org_apache_arrow_flatbuf_Buffer, - indicesTypeOffset indicesType: Offset, - indicesBuffer: org_apache_arrow_flatbuf_Buffer - ) -> Offset { - let __start = org_apache_arrow_flatbuf_SparseMatrixIndexCSX.startSparseMatrixIndexCSX(&fbb) - org_apache_arrow_flatbuf_SparseMatrixIndexCSX.add(compressedAxis: compressedAxis, &fbb) - org_apache_arrow_flatbuf_SparseMatrixIndexCSX.add(indptrType: indptrType, &fbb) - org_apache_arrow_flatbuf_SparseMatrixIndexCSX.add(indptrBuffer: indptrBuffer, &fbb) - org_apache_arrow_flatbuf_SparseMatrixIndexCSX.add(indicesType: indicesType, &fbb) - org_apache_arrow_flatbuf_SparseMatrixIndexCSX.add(indicesBuffer: indicesBuffer, &fbb) - return org_apache_arrow_flatbuf_SparseMatrixIndexCSX.endSparseMatrixIndexCSX(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.compressedAxis.p, fieldName: "compressedAxis", required: false, type: org_apache_arrow_flatbuf_SparseMatrixCompressedAxis.self) - try _v.visit(field: VTOFFSET.indptrType.p, fieldName: "indptrType", required: true, type: ForwardOffset.self) - try _v.visit(field: VTOFFSET.indptrBuffer.p, fieldName: "indptrBuffer", required: true, type: org_apache_arrow_flatbuf_Buffer.self) - try _v.visit(field: VTOFFSET.indicesType.p, fieldName: "indicesType", required: true, type: ForwardOffset.self) - try _v.visit(field: VTOFFSET.indicesBuffer.p, fieldName: "indicesBuffer", required: true, type: org_apache_arrow_flatbuf_Buffer.self) - _v.finish() - } -} - -/// Compressed Sparse Fiber (CSF) sparse tensor index. -public struct org_apache_arrow_flatbuf_SparseTensorIndexCSF: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! 
{ return _accessor.bb } - private var _accessor: Table - - public static func getRootAsSparseTensorIndexCSF(bb: ByteBuffer) -> org_apache_arrow_flatbuf_SparseTensorIndexCSF { return org_apache_arrow_flatbuf_SparseTensorIndexCSF(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case indptrType = 4 - case indptrBuffers = 6 - case indicesType = 8 - case indicesBuffers = 10 - case axisOrder = 12 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - /// CSF is a generalization of compressed sparse row (CSR) index. - /// See [smith2017knl](http://shaden.io/pub-files/smith2017knl.pdf) - /// - /// CSF index recursively compresses each dimension of a tensor into a set - /// of prefix trees. Each path from a root to leaf forms one tensor - /// non-zero index. CSF is implemented with two arrays of buffers and one - /// arrays of integers. - /// - /// For example, let X be a 2x3x4x5 tensor and let it have the following - /// 8 non-zero values: - /// ```text - /// X[0, 0, 0, 1] := 1 - /// X[0, 0, 0, 2] := 2 - /// X[0, 1, 0, 0] := 3 - /// X[0, 1, 0, 2] := 4 - /// X[0, 1, 1, 0] := 5 - /// X[1, 1, 1, 0] := 6 - /// X[1, 1, 1, 1] := 7 - /// X[1, 1, 1, 2] := 8 - /// ``` - /// As a prefix tree this would be represented as: - /// ```text - /// 0 1 - /// / \ | - /// 0 1 1 - /// / / \ | - /// 0 0 1 1 - /// /| /| | /| | - /// 1 2 0 2 0 0 1 2 - /// ``` - /// The type of values in indptrBuffers - public var indptrType: org_apache_arrow_flatbuf_Int! { let o = _accessor.offset(VTOFFSET.indptrType.v); return org_apache_arrow_flatbuf_Int(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) } - /// indptrBuffers stores the sparsity structure. - /// Each two consecutive dimensions in a tensor correspond to a buffer in - /// indptrBuffers. A pair of consecutive values at `indptrBuffers[dim][i]` - /// and `indptrBuffers[dim][i + 1]` signify a range of nodes in - /// `indicesBuffers[dim + 1]` who are children of `indicesBuffers[dim][i]` node. - /// - /// For example, the indptrBuffers for the above X is: - /// ```text - /// indptrBuffer(X) = [ - /// [0, 2, 3], - /// [0, 1, 3, 4], - /// [0, 2, 4, 5, 8] - /// ]. - /// ``` - public var hasIndptrBuffers: Bool { let o = _accessor.offset(VTOFFSET.indptrBuffers.v); return o == 0 ? false : true } - public var indptrBuffersCount: Int32 { let o = _accessor.offset(VTOFFSET.indptrBuffers.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func indptrBuffers(at index: Int32) -> org_apache_arrow_flatbuf_Buffer? { let o = _accessor.offset(VTOFFSET.indptrBuffers.v); return o == 0 ? nil : _accessor.directRead(of: org_apache_arrow_flatbuf_Buffer.self, offset: _accessor.vector(at: o) + index * 16) } - public func mutableIndptrBuffers(at index: Int32) -> org_apache_arrow_flatbuf_Buffer_Mutable? { let o = _accessor.offset(VTOFFSET.indptrBuffers.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: _accessor.vector(at: o) + index * 16) } - /// The type of values in indicesBuffers - public var indicesType: org_apache_arrow_flatbuf_Int! { let o = _accessor.offset(VTOFFSET.indicesType.v); return org_apache_arrow_flatbuf_Int(_accessor.bb, o: _accessor.indirect(o + _accessor.position)) } - /// indicesBuffers stores values of nodes. - /// Each tensor dimension corresponds to a buffer in indicesBuffers. 
- /// For example, the indicesBuffers for the above X is: - /// ```text - /// indicesBuffer(X) = [ - /// [0, 1], - /// [0, 1, 1], - /// [0, 0, 1, 1], - /// [1, 2, 0, 2, 0, 0, 1, 2] - /// ]. - /// ``` - public var hasIndicesBuffers: Bool { let o = _accessor.offset(VTOFFSET.indicesBuffers.v); return o == 0 ? false : true } - public var indicesBuffersCount: Int32 { let o = _accessor.offset(VTOFFSET.indicesBuffers.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func indicesBuffers(at index: Int32) -> org_apache_arrow_flatbuf_Buffer? { let o = _accessor.offset(VTOFFSET.indicesBuffers.v); return o == 0 ? nil : _accessor.directRead(of: org_apache_arrow_flatbuf_Buffer.self, offset: _accessor.vector(at: o) + index * 16) } - public func mutableIndicesBuffers(at index: Int32) -> org_apache_arrow_flatbuf_Buffer_Mutable? { let o = _accessor.offset(VTOFFSET.indicesBuffers.v); return o == 0 ? nil : org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: _accessor.vector(at: o) + index * 16) } - /// axisOrder stores the sequence in which dimensions were traversed to - /// produce the prefix tree. - /// For example, the axisOrder for the above X is: - /// ```text - /// axisOrder(X) = [0, 1, 2, 3]. - /// ``` - public var hasAxisOrder: Bool { let o = _accessor.offset(VTOFFSET.axisOrder.v); return o == 0 ? false : true } - public var axisOrderCount: Int32 { let o = _accessor.offset(VTOFFSET.axisOrder.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func axisOrder(at index: Int32) -> Int32 { let o = _accessor.offset(VTOFFSET.axisOrder.v); return o == 0 ? 0 : _accessor.directRead(of: Int32.self, offset: _accessor.vector(at: o) + index * 4) } - public var axisOrder: [Int32] { return _accessor.getVector(at: VTOFFSET.axisOrder.v) ?? [] } - public static func startSparseTensorIndexCSF(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 5) } - public static func add(indptrType: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indptrType, at: VTOFFSET.indptrType.p) } - public static func addVectorOf(indptrBuffers: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indptrBuffers, at: VTOFFSET.indptrBuffers.p) } - public static func startVectorOfIndptrBuffers(_ size: Int, in builder: inout FlatBufferBuilder) { - builder.startVector(size * MemoryLayout.size, elementSize: MemoryLayout.alignment) - } - public static func add(indicesType: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indicesType, at: VTOFFSET.indicesType.p) } - public static func addVectorOf(indicesBuffers: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: indicesBuffers, at: VTOFFSET.indicesBuffers.p) } - public static func startVectorOfIndicesBuffers(_ size: Int, in builder: inout FlatBufferBuilder) { - builder.startVector(size * MemoryLayout.size, elementSize: MemoryLayout.alignment) - } - public static func addVectorOf(axisOrder: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: axisOrder, at: VTOFFSET.axisOrder.p) } - public static func endSparseTensorIndexCSF(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); fbb.require(table: end, fields: [4, 6, 8, 10, 12]); return end } - public static func createSparseTensorIndexCSF( - _ fbb: inout FlatBufferBuilder, - indptrTypeOffset indptrType: Offset, - indptrBuffersVectorOffset indptrBuffers: Offset, - indicesTypeOffset indicesType: Offset, - indicesBuffersVectorOffset indicesBuffers: Offset, - axisOrderVectorOffset axisOrder: Offset - ) -> Offset { - let __start = 
org_apache_arrow_flatbuf_SparseTensorIndexCSF.startSparseTensorIndexCSF(&fbb) - org_apache_arrow_flatbuf_SparseTensorIndexCSF.add(indptrType: indptrType, &fbb) - org_apache_arrow_flatbuf_SparseTensorIndexCSF.addVectorOf(indptrBuffers: indptrBuffers, &fbb) - org_apache_arrow_flatbuf_SparseTensorIndexCSF.add(indicesType: indicesType, &fbb) - org_apache_arrow_flatbuf_SparseTensorIndexCSF.addVectorOf(indicesBuffers: indicesBuffers, &fbb) - org_apache_arrow_flatbuf_SparseTensorIndexCSF.addVectorOf(axisOrder: axisOrder, &fbb) - return org_apache_arrow_flatbuf_SparseTensorIndexCSF.endSparseTensorIndexCSF(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.indptrType.p, fieldName: "indptrType", required: true, type: ForwardOffset.self) - try _v.visit(field: VTOFFSET.indptrBuffers.p, fieldName: "indptrBuffers", required: true, type: ForwardOffset>.self) - try _v.visit(field: VTOFFSET.indicesType.p, fieldName: "indicesType", required: true, type: ForwardOffset.self) - try _v.visit(field: VTOFFSET.indicesBuffers.p, fieldName: "indicesBuffers", required: true, type: ForwardOffset>.self) - try _v.visit(field: VTOFFSET.axisOrder.p, fieldName: "axisOrder", required: true, type: ForwardOffset>.self) - _v.finish() - } -} - -public struct org_apache_arrow_flatbuf_SparseTensor: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsSparseTensor(bb: ByteBuffer) -> org_apache_arrow_flatbuf_SparseTensor { return org_apache_arrow_flatbuf_SparseTensor(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case typeType = 4 - case type = 6 - case shape = 8 - case nonZeroLength = 10 - case sparseIndexType = 12 - case sparseIndex = 14 - case data = 16 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - public var typeType: org_apache_arrow_flatbuf_Type_ { let o = _accessor.offset(VTOFFSET.typeType.v); return o == 0 ? .none_ : org_apache_arrow_flatbuf_Type_(rawValue: _accessor.readBuffer(of: UInt8.self, at: o)) ?? .none_ } - /// The type of data contained in a value cell. - /// Currently only fixed-width value types are supported, - /// no strings or nested types. - public func type(type: T.Type) -> T! { let o = _accessor.offset(VTOFFSET.type.v); return _accessor.union(o) } - /// The dimensions of the tensor, optionally named. - public var hasShape: Bool { let o = _accessor.offset(VTOFFSET.shape.v); return o == 0 ? false : true } - public var shapeCount: Int32 { let o = _accessor.offset(VTOFFSET.shape.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func shape(at index: Int32) -> org_apache_arrow_flatbuf_TensorDim? { let o = _accessor.offset(VTOFFSET.shape.v); return o == 0 ? nil : org_apache_arrow_flatbuf_TensorDim(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) } - /// The number of non-zero values in a sparse tensor. - public var nonZeroLength: Int64 { let o = _accessor.offset(VTOFFSET.nonZeroLength.v); return o == 0 ? 
0 : _accessor.readBuffer(of: Int64.self, at: o) } - public var sparseIndexType: org_apache_arrow_flatbuf_SparseTensorIndex { let o = _accessor.offset(VTOFFSET.sparseIndexType.v); return o == 0 ? .none_ : org_apache_arrow_flatbuf_SparseTensorIndex(rawValue: _accessor.readBuffer(of: UInt8.self, at: o)) ?? .none_ } - /// Sparse tensor index - public func sparseIndex(type: T.Type) -> T! { let o = _accessor.offset(VTOFFSET.sparseIndex.v); return _accessor.union(o) } - /// The location and size of the tensor's data - public var data: org_apache_arrow_flatbuf_Buffer! { let o = _accessor.offset(VTOFFSET.data.v); return _accessor.readBuffer(of: org_apache_arrow_flatbuf_Buffer.self, at: o) } - public var mutableData: org_apache_arrow_flatbuf_Buffer_Mutable! { let o = _accessor.offset(VTOFFSET.data.v); return org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: o + _accessor.position) } - public static func startSparseTensor(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 7) } - public static func add(typeType: org_apache_arrow_flatbuf_Type_, _ fbb: inout FlatBufferBuilder) { fbb.add(element: typeType.rawValue, def: 0, at: VTOFFSET.typeType.p) } - public static func add(type: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: type, at: VTOFFSET.type.p) } - public static func addVectorOf(shape: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: shape, at: VTOFFSET.shape.p) } - public static func add(nonZeroLength: Int64, _ fbb: inout FlatBufferBuilder) { fbb.add(element: nonZeroLength, def: 0, at: VTOFFSET.nonZeroLength.p) } - public static func add(sparseIndexType: org_apache_arrow_flatbuf_SparseTensorIndex, _ fbb: inout FlatBufferBuilder) { fbb.add(element: sparseIndexType.rawValue, def: 0, at: VTOFFSET.sparseIndexType.p) } - public static func add(sparseIndex: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: sparseIndex, at: VTOFFSET.sparseIndex.p) } - public static func add(data: org_apache_arrow_flatbuf_Buffer?, _ fbb: inout FlatBufferBuilder) { guard let data = data else { return }; fbb.create(struct: data, position: VTOFFSET.data.p) } - public static func endSparseTensor(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); fbb.require(table: end, fields: [6, 8, 14, 16]); return end } - public static func createSparseTensor( - _ fbb: inout FlatBufferBuilder, - typeType: org_apache_arrow_flatbuf_Type_ = .none_, - typeOffset type: Offset, - shapeVectorOffset shape: Offset, - nonZeroLength: Int64 = 0, - sparseIndexType: org_apache_arrow_flatbuf_SparseTensorIndex = .none_, - sparseIndexOffset sparseIndex: Offset, - data: org_apache_arrow_flatbuf_Buffer - ) -> Offset { - let __start = org_apache_arrow_flatbuf_SparseTensor.startSparseTensor(&fbb) - org_apache_arrow_flatbuf_SparseTensor.add(typeType: typeType, &fbb) - org_apache_arrow_flatbuf_SparseTensor.add(type: type, &fbb) - org_apache_arrow_flatbuf_SparseTensor.addVectorOf(shape: shape, &fbb) - org_apache_arrow_flatbuf_SparseTensor.add(nonZeroLength: nonZeroLength, &fbb) - org_apache_arrow_flatbuf_SparseTensor.add(sparseIndexType: sparseIndexType, &fbb) - org_apache_arrow_flatbuf_SparseTensor.add(sparseIndex: sparseIndex, &fbb) - org_apache_arrow_flatbuf_SparseTensor.add(data: data, &fbb) - return org_apache_arrow_flatbuf_SparseTensor.endSparseTensor(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - 
try _v.visit(unionKey: VTOFFSET.typeType.p, unionField: VTOFFSET.type.p, unionKeyName: "typeType", fieldName: "type", required: true, completion: { (verifier, key: org_apache_arrow_flatbuf_Type_, pos) in - switch key { - case .none_: - break // NOTE - SWIFT doesnt support none - case .null: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Null.self) - case .int: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Int.self) - case .floatingpoint: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FloatingPoint.self) - case .binary: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Binary.self) - case .utf8: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Utf8.self) - case .bool: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Bool.self) - case .decimal: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Decimal.self) - case .date: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Date.self) - case .time: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Time.self) - case .timestamp: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Timestamp.self) - case .interval: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Interval.self) - case .list: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_List.self) - case .struct_: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Struct_.self) - case .union: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Union.self) - case .fixedsizebinary: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FixedSizeBinary.self) - case .fixedsizelist: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FixedSizeList.self) - case .map: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Map.self) - case .duration: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Duration.self) - case .largebinary: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeBinary.self) - case .largeutf8: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeUtf8.self) - case .largelist: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeList.self) - case .runendencoded: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_RunEndEncoded.self) - } - }) - try _v.visit(field: VTOFFSET.shape.p, fieldName: "shape", required: true, type: ForwardOffset, org_apache_arrow_flatbuf_TensorDim>>.self) - try _v.visit(field: VTOFFSET.nonZeroLength.p, fieldName: "nonZeroLength", required: false, type: Int64.self) - try _v.visit(unionKey: VTOFFSET.sparseIndexType.p, unionField: VTOFFSET.sparseIndex.p, unionKeyName: "sparseIndexType", fieldName: "sparseIndex", required: true, completion: { (verifier, key: org_apache_arrow_flatbuf_SparseTensorIndex, pos) in - switch key { - case .none_: - break // NOTE - SWIFT doesnt support none - case .sparsetensorindexcoo: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_SparseTensorIndexCOO.self) - case .sparsematrixindexcsx: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_SparseMatrixIndexCSX.self) - case .sparsetensorindexcsf: - try 
ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_SparseTensorIndexCSF.self) - } - }) - try _v.visit(field: VTOFFSET.data.p, fieldName: "data", required: true, type: org_apache_arrow_flatbuf_Buffer.self) - _v.finish() - } -} - diff --git a/swift/Arrow/Sources/Arrow/Tensor_generated.swift b/swift/Arrow/Sources/Arrow/Tensor_generated.swift deleted file mode 100644 index e9778d0b524..00000000000 --- a/swift/Arrow/Sources/Arrow/Tensor_generated.swift +++ /dev/null @@ -1,193 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -// automatically generated by the FlatBuffers compiler, do not modify -// swiftlint:disable all -// swiftformat:disable all - -import FlatBuffers - -/// ---------------------------------------------------------------------- -/// Data structures for dense tensors -/// Shape data for a single axis in a tensor -public struct org_apache_arrow_flatbuf_TensorDim: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsTensorDim(bb: ByteBuffer) -> org_apache_arrow_flatbuf_TensorDim { return org_apache_arrow_flatbuf_TensorDim(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case size = 4 - case name = 6 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - /// Length of dimension - public var size: Int64 { let o = _accessor.offset(VTOFFSET.size.v); return o == 0 ? 0 : _accessor.readBuffer(of: Int64.self, at: o) } - /// Name of the dimension, optional - public var name: String? { let o = _accessor.offset(VTOFFSET.name.v); return o == 0 ? nil : _accessor.string(at: o) } - public var nameSegmentArray: [UInt8]? 
{ return _accessor.getVector(at: VTOFFSET.name.v) } - public static func startTensorDim(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 2) } - public static func add(size: Int64, _ fbb: inout FlatBufferBuilder) { fbb.add(element: size, def: 0, at: VTOFFSET.size.p) } - public static func add(name: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: name, at: VTOFFSET.name.p) } - public static func endTensorDim(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); return end } - public static func createTensorDim( - _ fbb: inout FlatBufferBuilder, - size: Int64 = 0, - nameOffset name: Offset = Offset() - ) -> Offset { - let __start = org_apache_arrow_flatbuf_TensorDim.startTensorDim(&fbb) - org_apache_arrow_flatbuf_TensorDim.add(size: size, &fbb) - org_apache_arrow_flatbuf_TensorDim.add(name: name, &fbb) - return org_apache_arrow_flatbuf_TensorDim.endTensorDim(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(field: VTOFFSET.size.p, fieldName: "size", required: false, type: Int64.self) - try _v.visit(field: VTOFFSET.name.p, fieldName: "name", required: false, type: ForwardOffset.self) - _v.finish() - } -} - -public struct org_apache_arrow_flatbuf_Tensor: FlatBufferObject, Verifiable { - - static func validateVersion() { FlatBuffersVersion_23_1_4() } - public var __buffer: ByteBuffer! { return _accessor.bb } - private var _accessor: Table - - public static func getRootAsTensor(bb: ByteBuffer) -> org_apache_arrow_flatbuf_Tensor { return org_apache_arrow_flatbuf_Tensor(Table(bb: bb, position: Int32(bb.read(def: UOffset.self, position: bb.reader)) + Int32(bb.reader))) } - - private init(_ t: Table) { _accessor = t } - public init(_ bb: ByteBuffer, o: Int32) { _accessor = Table(bb: bb, position: o) } - - private enum VTOFFSET: VOffset { - case typeType = 4 - case type = 6 - case shape = 8 - case strides = 10 - case data = 12 - var v: Int32 { Int32(self.rawValue) } - var p: VOffset { self.rawValue } - } - - public var typeType: org_apache_arrow_flatbuf_Type_ { let o = _accessor.offset(VTOFFSET.typeType.v); return o == 0 ? .none_ : org_apache_arrow_flatbuf_Type_(rawValue: _accessor.readBuffer(of: UInt8.self, at: o)) ?? .none_ } - /// The type of data contained in a value cell. Currently only fixed-width - /// value types are supported, no strings or nested types - public func type(type: T.Type) -> T! { let o = _accessor.offset(VTOFFSET.type.v); return _accessor.union(o) } - /// The dimensions of the tensor, optionally named - public var hasShape: Bool { let o = _accessor.offset(VTOFFSET.shape.v); return o == 0 ? false : true } - public var shapeCount: Int32 { let o = _accessor.offset(VTOFFSET.shape.v); return o == 0 ? 0 : _accessor.vector(count: o) } - public func shape(at index: Int32) -> org_apache_arrow_flatbuf_TensorDim? { let o = _accessor.offset(VTOFFSET.shape.v); return o == 0 ? nil : org_apache_arrow_flatbuf_TensorDim(_accessor.bb, o: _accessor.indirect(_accessor.vector(at: o) + index * 4)) } - /// Non-negative byte offsets to advance one value cell along each dimension - /// If omitted, default to row-major order (C-like). - public var hasStrides: Bool { let o = _accessor.offset(VTOFFSET.strides.v); return o == 0 ? false : true } - public var stridesCount: Int32 { let o = _accessor.offset(VTOFFSET.strides.v); return o == 0 ? 
0 : _accessor.vector(count: o) } - public func strides(at index: Int32) -> Int64 { let o = _accessor.offset(VTOFFSET.strides.v); return o == 0 ? 0 : _accessor.directRead(of: Int64.self, offset: _accessor.vector(at: o) + index * 8) } - public var strides: [Int64] { return _accessor.getVector(at: VTOFFSET.strides.v) ?? [] } - /// The location and size of the tensor's data - public var data: org_apache_arrow_flatbuf_Buffer! { let o = _accessor.offset(VTOFFSET.data.v); return _accessor.readBuffer(of: org_apache_arrow_flatbuf_Buffer.self, at: o) } - public var mutableData: org_apache_arrow_flatbuf_Buffer_Mutable! { let o = _accessor.offset(VTOFFSET.data.v); return org_apache_arrow_flatbuf_Buffer_Mutable(_accessor.bb, o: o + _accessor.position) } - public static func startTensor(_ fbb: inout FlatBufferBuilder) -> UOffset { fbb.startTable(with: 5) } - public static func add(typeType: org_apache_arrow_flatbuf_Type_, _ fbb: inout FlatBufferBuilder) { fbb.add(element: typeType.rawValue, def: 0, at: VTOFFSET.typeType.p) } - public static func add(type: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: type, at: VTOFFSET.type.p) } - public static func addVectorOf(shape: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: shape, at: VTOFFSET.shape.p) } - public static func addVectorOf(strides: Offset, _ fbb: inout FlatBufferBuilder) { fbb.add(offset: strides, at: VTOFFSET.strides.p) } - public static func add(data: org_apache_arrow_flatbuf_Buffer?, _ fbb: inout FlatBufferBuilder) { guard let data = data else { return }; fbb.create(struct: data, position: VTOFFSET.data.p) } - public static func endTensor(_ fbb: inout FlatBufferBuilder, start: UOffset) -> Offset { let end = Offset(offset: fbb.endTable(at: start)); fbb.require(table: end, fields: [6, 8, 12]); return end } - public static func createTensor( - _ fbb: inout FlatBufferBuilder, - typeType: org_apache_arrow_flatbuf_Type_ = .none_, - typeOffset type: Offset, - shapeVectorOffset shape: Offset, - stridesVectorOffset strides: Offset = Offset(), - data: org_apache_arrow_flatbuf_Buffer - ) -> Offset { - let __start = org_apache_arrow_flatbuf_Tensor.startTensor(&fbb) - org_apache_arrow_flatbuf_Tensor.add(typeType: typeType, &fbb) - org_apache_arrow_flatbuf_Tensor.add(type: type, &fbb) - org_apache_arrow_flatbuf_Tensor.addVectorOf(shape: shape, &fbb) - org_apache_arrow_flatbuf_Tensor.addVectorOf(strides: strides, &fbb) - org_apache_arrow_flatbuf_Tensor.add(data: data, &fbb) - return org_apache_arrow_flatbuf_Tensor.endTensor(&fbb, start: __start) - } - - public static func verify(_ verifier: inout Verifier, at position: Int, of type: T.Type) throws where T: Verifiable { - var _v = try verifier.visitTable(at: position) - try _v.visit(unionKey: VTOFFSET.typeType.p, unionField: VTOFFSET.type.p, unionKeyName: "typeType", fieldName: "type", required: true, completion: { (verifier, key: org_apache_arrow_flatbuf_Type_, pos) in - switch key { - case .none_: - break // NOTE - SWIFT doesnt support none - case .null: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Null.self) - case .int: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Int.self) - case .floatingpoint: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FloatingPoint.self) - case .binary: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Binary.self) - case .utf8: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Utf8.self) - case .bool: - try 
ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Bool.self) - case .decimal: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Decimal.self) - case .date: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Date.self) - case .time: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Time.self) - case .timestamp: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Timestamp.self) - case .interval: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Interval.self) - case .list: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_List.self) - case .struct_: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Struct_.self) - case .union: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Union.self) - case .fixedsizebinary: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FixedSizeBinary.self) - case .fixedsizelist: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_FixedSizeList.self) - case .map: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Map.self) - case .duration: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_Duration.self) - case .largebinary: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeBinary.self) - case .largeutf8: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeUtf8.self) - case .largelist: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_LargeList.self) - case .runendencoded: - try ForwardOffset.verify(&verifier, at: pos, of: org_apache_arrow_flatbuf_RunEndEncoded.self) - } - }) - try _v.visit(field: VTOFFSET.shape.p, fieldName: "shape", required: true, type: ForwardOffset, org_apache_arrow_flatbuf_TensorDim>>.self) - try _v.visit(field: VTOFFSET.strides.p, fieldName: "strides", required: false, type: ForwardOffset>.self) - try _v.visit(field: VTOFFSET.data.p, fieldName: "data", required: true, type: org_apache_arrow_flatbuf_Buffer.self) - _v.finish() - } -} - diff --git a/swift/Arrow/Sources/ArrowC/ArrowCData.c b/swift/Arrow/Sources/ArrowC/ArrowCData.c deleted file mode 100644 index fe0f8089971..00000000000 --- a/swift/Arrow/Sources/ArrowC/ArrowCData.c +++ /dev/null @@ -1,31 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
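
The `SparseMatrixIndexCSX` doc comment above walks through a 6x4 example matrix; the sketch below shows how such a CSR-style `indptr` is derived. This is illustrative commentary only, not part of the deleted file, and `makeIndptr` is a hypothetical helper rather than an Arrow API.

```swift
// Hypothetical helper (not an Arrow API): build the CSR-style indptr
// described in the SparseMatrixIndexCSX doc comment above.
func makeIndptr(nonZerosPerRow: [Int64]) -> [Int64] {
    // indptr[i] is the running count of non-zero values before row i, so
    // the array has (number of rows + 1) entries and starts at 0.
    var indptr: [Int64] = [0]
    indptr.reserveCapacity(nonZerosPerRow.count + 1)
    for count in nonZerosPerRow {
        indptr.append(indptr.last! + count)
    }
    return indptr
}

// The 6x4 example's rows hold [2, 1, 2, 0, 3, 1] non-zero values, giving
// indptr [0, 2, 3, 5, 5, 8, 9]: row i's values occupy
// values[indptr[i]..<indptr[i+1]], and the final entry always equals the
// total non-zero count (9 here).
```
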
- -#include -#include "include/ArrowCData.h" - -void ArrowSwiftClearReleaseSchema(struct ArrowSchema* arrowSchema) { - if(arrowSchema) { - arrowSchema->release = NULL; - } -} - -void ArrowSwiftClearReleaseArray(struct ArrowArray* arrowArray) { - if(arrowArray) { - arrowArray->release = NULL; - } -} diff --git a/swift/Arrow/Sources/ArrowC/include/ArrowCData.h b/swift/Arrow/Sources/ArrowC/include/ArrowCData.h deleted file mode 100644 index 4b2f35efcb9..00000000000 --- a/swift/Arrow/Sources/ArrowC/include/ArrowCData.h +++ /dev/null @@ -1,78 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -#ifndef ARROW_C_DATA_INTERFACE -#define ARROW_C_DATA_INTERFACE - -#define ARROW_FLAG_DICTIONARY_ORDERED 1 -#define ARROW_FLAG_NULLABLE 2 -#define ARROW_FLAG_MAP_KEYS_SORTED 4 - -#include // For int64_t - -#ifdef __cplusplus -extern "C" { -#endif - -struct ArrowSchema { - // Array type description - const char* format; - const char* name; - const char* metadata; - int64_t flags; - int64_t n_children; - struct ArrowSchema** children; - struct ArrowSchema* dictionary; - - // Release callback - void (*release)(struct ArrowSchema*); - // Opaque producer-specific data - void* private_data; -}; - -struct ArrowArray { - // Array data description - int64_t length; - int64_t null_count; - int64_t offset; - int64_t n_buffers; - int64_t n_children; - const void** buffers; - struct ArrowArray** children; - struct ArrowArray* dictionary; - - // Release callback - void (*release)(struct ArrowArray*); - // Opaque producer-specific data - void* private_data; -}; - -// Not able to set the release on the schema -// to NULL in Swift. nil in Swift is not -// equivalent to NULL. -void ArrowSwiftClearReleaseSchema(struct ArrowSchema*); - -// Not able to set the release on the array -// to NULL in Swift. nil in Swift is not -// equivalent to NULL. -void ArrowSwiftClearReleaseArray(struct ArrowArray*); - -#ifdef __cplusplus -} -#endif - -#endif // ARROW_C_DATA_INTERFACE diff --git a/swift/Arrow/Tests/ArrowTests/ArrayBuilderTest.swift b/swift/Arrow/Tests/ArrowTests/ArrayBuilderTest.swift deleted file mode 100644 index 42e167f01fc..00000000000 --- a/swift/Arrow/Tests/ArrowTests/ArrayBuilderTest.swift +++ /dev/null @@ -1,85 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import XCTest -@testable import Arrow - -final class ArrayBuilderTests: XCTestCase { - func testIsValidTypeForBuilder() throws { - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt8.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Int16.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Int32.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Int64.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt8.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt16.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt32.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt64.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Float.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Double.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Date.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Bool.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Int8?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Int16?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Int32?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Int64?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt8?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt16?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt32?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(UInt64?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Float?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Double?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Date?.self)) - XCTAssertTrue(ArrowArrayBuilders.isValidBuilderType(Bool?.self)) - - XCTAssertFalse(ArrowArrayBuilders.isValidBuilderType(Int.self)) - XCTAssertFalse(ArrowArrayBuilders.isValidBuilderType(UInt.self)) - XCTAssertFalse(ArrowArrayBuilders.isValidBuilderType(Int?.self)) - XCTAssertFalse(ArrowArrayBuilders.isValidBuilderType(UInt?.self)) - } - - func testLoadArrayBuilders() throws { - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Int8.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Int16.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Int32.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Int64.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt8.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt16.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt32.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt64.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Float.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Double.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Date.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Bool.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Int8?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Int16?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Int32?.self)) - XCTAssertNotNil(try 
ArrowArrayBuilders.loadBuilder(Int64?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt8?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt16?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt32?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(UInt64?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Float?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Double?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Date?.self)) - XCTAssertNotNil(try ArrowArrayBuilders.loadBuilder(Bool?.self)) - - XCTAssertThrowsError(try ArrowArrayBuilders.loadBuilder(Int.self)) - XCTAssertThrowsError(try ArrowArrayBuilders.loadBuilder(UInt.self)) - XCTAssertThrowsError(try ArrowArrayBuilders.loadBuilder(Int?.self)) - XCTAssertThrowsError(try ArrowArrayBuilders.loadBuilder(UInt?.self)) - } -} diff --git a/swift/Arrow/Tests/ArrowTests/ArrayTests.swift b/swift/Arrow/Tests/ArrowTests/ArrayTests.swift deleted file mode 100644 index d793aa11dcb..00000000000 --- a/swift/Arrow/Tests/ArrowTests/ArrayTests.swift +++ /dev/null @@ -1,371 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import XCTest -@testable import Arrow - -final class ArrayTests: XCTestCase { // swiftlint:disable:this type_body_length - func testPrimitiveArray() throws { - // This is an example of a functional test case. - // Use XCTAssert and related functions to verify your tests produce the correct - // results. 
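        // Added descriptive note, not part of the original test: the flow
        // below is the core Arrow-Swift construction pattern -- load a typed
        // builder, append values (an appended nil still advances length and
        // bumps nullCount), then call finish() to freeze the data into an
        // immutable array. The capacity assertions record the builder's
        // internal growth increments.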
- let arrayBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - for index in 0..<100 { - arrayBuilder.append(UInt8(index)) - } - - XCTAssertEqual(arrayBuilder.nullCount, 0) - arrayBuilder.append(nil) - XCTAssertEqual(arrayBuilder.length, 101) - XCTAssertEqual(arrayBuilder.capacity, 136) - XCTAssertEqual(arrayBuilder.nullCount, 1) - let array = try arrayBuilder.finish() - XCTAssertEqual(array.length, 101) - XCTAssertEqual(array[1]!, 1) - XCTAssertEqual(array[10]!, 10) - XCTAssertEqual(try array.isNull(100), true) - - let doubleBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - doubleBuilder.append(14) - doubleBuilder.append(40.4) - XCTAssertEqual(doubleBuilder.nullCount, 0) - XCTAssertEqual(doubleBuilder.length, 2) - XCTAssertEqual(doubleBuilder.capacity, 264) - let doubleArray = try doubleBuilder.finish() - XCTAssertEqual(doubleArray.length, 2) - XCTAssertEqual(doubleArray[0]!, 14) - XCTAssertEqual(doubleArray[1]!, 40.4) - } - - func testStringArray() throws { - let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder() - for index in 0..<100 { - if index % 10 == 9 { - stringBuilder.append(nil) - } else { - stringBuilder.append("test" + String(index)) - } - } - - XCTAssertEqual(stringBuilder.nullCount, 10) - XCTAssertEqual(stringBuilder.length, 100) - XCTAssertEqual(stringBuilder.capacity, 648) - let stringArray = try stringBuilder.finish() - XCTAssertEqual(stringArray.length, 100) - for index in 0..) - for index in 0..<100 { - uint8HBuilder.appendAny(UInt8(index)) - } - - let uint8Holder = try uint8HBuilder.toHolder() - XCTAssertEqual(uint8Holder.nullCount, 0) - XCTAssertEqual(uint8Holder.length, 100) - - let stringHBuilder: ArrowArrayHolderBuilder = - (try ArrowArrayBuilders.loadStringArrayBuilder()) - for index in 0..<100 { - if index % 10 == 9 { - stringHBuilder.appendAny(nil) - } else { - stringHBuilder.appendAny("test" + String(index)) - } - } - - let stringHolder = try stringHBuilder.toHolder() - XCTAssertEqual(stringHolder.nullCount, 10) - XCTAssertEqual(stringHolder.length, 100) - } - - func testAddVArgs() throws { - let arrayBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - arrayBuilder.append(0, 1, 2, 3, 4, 5, 6, 7, 8, 9) - XCTAssertEqual(arrayBuilder.length, 10) - XCTAssertEqual(try arrayBuilder.finish()[2], 2) - let doubleBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - doubleBuilder.append(0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8) - XCTAssertEqual(doubleBuilder.length, 9) - XCTAssertEqual(try doubleBuilder.finish()[4], 4.4) - let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder() - stringBuilder.append("0", "1", "2", "3", "4", "5", "6") - XCTAssertEqual(stringBuilder.length, 7) - XCTAssertEqual(try stringBuilder.finish()[4], "4") - let boolBuilder = try ArrowArrayBuilders.loadBoolArrayBuilder() - boolBuilder.append(true, false, true, false) - XCTAssertEqual(try boolBuilder.finish()[2], true) - } - - func testAddArray() throws { - let arrayBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - arrayBuilder.append([0, 1, 2, 3, 4, 5, 6, 7, 8, 9]) - XCTAssertEqual(arrayBuilder.length, 10) - XCTAssertEqual(try arrayBuilder.finish()[2], 2) - let doubleBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - doubleBuilder.append([0, 1.1, 2.2, 3.3, 4.4, 5.5, 6.6, 7.7, 8.8]) - XCTAssertEqual(doubleBuilder.length, 9) - XCTAssertEqual(try doubleBuilder.finish()[4], 4.4) - let stringBuilder = try 
ArrowArrayBuilders.loadStringArrayBuilder() - stringBuilder.append(["0", "1", "2", "3", "4", "5", "6"]) - XCTAssertEqual(stringBuilder.length, 7) - XCTAssertEqual(try stringBuilder.finish()[4], "4") - let boolBuilder = try ArrowArrayBuilders.loadBoolArrayBuilder() - boolBuilder.append([true, false, true, false]) - XCTAssertEqual(try boolBuilder.finish()[2], true) - } -} diff --git a/swift/Arrow/Tests/ArrowTests/CDataTests.swift b/swift/Arrow/Tests/ArrowTests/CDataTests.swift deleted file mode 100644 index 2344b234745..00000000000 --- a/swift/Arrow/Tests/ArrowTests/CDataTests.swift +++ /dev/null @@ -1,125 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -import XCTest -@testable import Arrow -import ArrowC - -final class CDataTests: XCTestCase { - func makeSchema() -> Arrow.ArrowSchema { - let schemaBuilder = ArrowSchema.Builder() - return schemaBuilder - .addField("colBool", type: ArrowType(ArrowType.ArrowBool), isNullable: false) - .addField("colUInt8", type: ArrowType(ArrowType.ArrowUInt8), isNullable: true) - .addField("colUInt16", type: ArrowType(ArrowType.ArrowUInt16), isNullable: true) - .addField("colUInt32", type: ArrowType(ArrowType.ArrowUInt32), isNullable: true) - .addField("colUInt64", type: ArrowType(ArrowType.ArrowUInt64), isNullable: true) - .addField("colInt8", type: ArrowType(ArrowType.ArrowInt8), isNullable: false) - .addField("colInt16", type: ArrowType(ArrowType.ArrowInt16), isNullable: false) - .addField("colInt32", type: ArrowType(ArrowType.ArrowInt32), isNullable: false) - .addField("colInt64", type: ArrowType(ArrowType.ArrowInt64), isNullable: false) - .addField("colString", type: ArrowType(ArrowType.ArrowString), isNullable: false) - .addField("colBinary", type: ArrowType(ArrowType.ArrowBinary), isNullable: false) - .addField("colDate32", type: ArrowType(ArrowType.ArrowDate32), isNullable: false) - .addField("colDate64", type: ArrowType(ArrowType.ArrowDate64), isNullable: false) - .addField("colTime32", type: ArrowType(ArrowType.ArrowTime32), isNullable: false) - .addField("colTime32s", type: ArrowTypeTime32(.seconds), isNullable: false) - .addField("colTime32m", type: ArrowTypeTime32(.milliseconds), isNullable: false) - .addField("colTime64", type: ArrowType(ArrowType.ArrowTime64), isNullable: false) - .addField("colTime64u", type: ArrowTypeTime64(.microseconds), isNullable: false) - .addField("colTime64n", type: ArrowTypeTime64(.nanoseconds), isNullable: false) - .addField("colTime64", type: ArrowType(ArrowType.ArrowTime64), isNullable: false) - .addField("colFloat", type: ArrowType(ArrowType.ArrowFloat), isNullable: false) - .addField("colDouble", type: ArrowType(ArrowType.ArrowDouble), isNullable: false) - .finish() - } - - func checkImportField(_ cSchema: ArrowC.ArrowSchema, name: 
String, type: ArrowType.Info) throws { - let importer = ArrowCImporter() - switch importer.importField(cSchema) { - case .success(let arrowField): - XCTAssertEqual(arrowField.type.info, type) - XCTAssertEqual(arrowField.name, name) - case .failure(let error): - throw error - } - } - - func testImportExportSchema() throws { - let schema = makeSchema() - let exporter = ArrowCExporter() - for arrowField in schema.fields { - var cSchema = ArrowC.ArrowSchema() - switch exporter.exportField(&cSchema, field: arrowField) { - case .success: - try checkImportField(cSchema, name: arrowField.name, type: arrowField.type.info) - case .failure(let error): - throw error - } - } - } - - func testImportExportArray() throws { - let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder() - for index in 0..<100 { - if index % 10 == 9 { - stringBuilder.append(nil) - } else { - stringBuilder.append("test" + String(index)) - } - } - - XCTAssertEqual(stringBuilder.nullCount, 10) - XCTAssertEqual(stringBuilder.length, 100) - XCTAssertEqual(stringBuilder.capacity, 648) - let stringArray = try stringBuilder.finish() - let exporter = ArrowCExporter() - var cArray = ArrowC.ArrowArray() - exporter.exportArray(&cArray, arrowData: stringArray.arrowData) - let cArrayMutPtr = UnsafeMutablePointer.allocate(capacity: 1) - cArrayMutPtr.pointee = cArray - defer { - cArrayMutPtr.deallocate() - } - - let importer = ArrowCImporter() - switch importer.importArray(UnsafePointer(cArrayMutPtr), arrowType: ArrowType(ArrowType.ArrowString)) { - case .success(let holder): - let builder = RecordBatch.Builder() - switch builder - .addColumn("test", arrowArray: holder) - .finish() { - case .success(let rb): - XCTAssertEqual(rb.columnCount, 1) - XCTAssertEqual(rb.length, 100) - let col1: Arrow.ArrowArray = rb.data(for: 0) - for index in 0.. 
-    func testImportExportArray() throws {
-        let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
-        for index in 0..<100 {
-            if index % 10 == 9 {
-                stringBuilder.append(nil)
-            } else {
-                stringBuilder.append("test" + String(index))
-            }
-        }
-
-        XCTAssertEqual(stringBuilder.nullCount, 10)
-        XCTAssertEqual(stringBuilder.length, 100)
-        XCTAssertEqual(stringBuilder.capacity, 648)
-        let stringArray = try stringBuilder.finish()
-        let exporter = ArrowCExporter()
-        var cArray = ArrowC.ArrowArray()
-        exporter.exportArray(&cArray, arrowData: stringArray.arrowData)
-        let cArrayMutPtr = UnsafeMutablePointer<ArrowC.ArrowArray>.allocate(capacity: 1)
-        cArrayMutPtr.pointee = cArray
-        defer {
-            cArrayMutPtr.deallocate()
-        }
-
-        let importer = ArrowCImporter()
-        switch importer.importArray(UnsafePointer(cArrayMutPtr), arrowType: ArrowType(ArrowType.ArrowString)) {
-        case .success(let holder):
-            let builder = RecordBatch.Builder()
-            switch builder
-                .addColumn("test", arrowArray: holder)
-                .finish() {
-            case .success(let rb):
-                XCTAssertEqual(rb.columnCount, 1)
-                XCTAssertEqual(rb.length, 100)
-                let col1: Arrow.ArrowArray<String> = rb.data(for: 0)
-                for index in 0.. = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let int16Builder: NumberArrayBuilder<Int16> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let int32Builder: NumberArrayBuilder<Int32> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let int64Builder: NumberArrayBuilder<Int64> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let uint8Builder: NumberArrayBuilder<UInt8> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let uint16Builder: NumberArrayBuilder<UInt16> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let uint32Builder: NumberArrayBuilder<UInt32> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let uint64Builder: NumberArrayBuilder<UInt64> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let floatBuilder: NumberArrayBuilder<Float> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let doubleBuilder: NumberArrayBuilder<Double> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
-        let dateBuilder = try ArrowArrayBuilders.loadDate64ArrayBuilder()
-
-        boolBuilder.append(false, true, false)
-        int8Builder.append(10, 11, 12)
-        int16Builder.append(20, 21, 22)
-        int32Builder.append(30, 31, 32)
-        int64Builder.append(40, 41, 42)
-        uint8Builder.append(50, 51, 52)
-        uint16Builder.append(60, 61, 62)
-        uint32Builder.append(70, 71, 72)
-        uint64Builder.append(80, 81, 82)
-        floatBuilder.append(90.1, 91.1, 92.1)
-        doubleBuilder.append(101.1, nil, nil)
-        stringBuilder.append("test0", "test1", "test2")
-        dateBuilder.append(date1, date1, date1)
-        let result = RecordBatch.Builder()
-            .addColumn("propBool", arrowArray: try boolBuilder.toHolder())
-            .addColumn("propInt8", arrowArray: try int8Builder.toHolder())
-            .addColumn("propInt16", arrowArray: try int16Builder.toHolder())
-            .addColumn("propInt32", arrowArray: try int32Builder.toHolder())
-            .addColumn("propInt64", arrowArray: try int64Builder.toHolder())
-            .addColumn("propUInt8", arrowArray: try uint8Builder.toHolder())
-            .addColumn("propUInt16", arrowArray: try uint16Builder.toHolder())
-            .addColumn("propUInt32", arrowArray: try uint32Builder.toHolder())
-            .addColumn("propUInt64", arrowArray: try uint64Builder.toHolder())
-            .addColumn("propFloat", arrowArray: try floatBuilder.toHolder())
-            .addColumn("propDouble", arrowArray: try doubleBuilder.toHolder())
-            .addColumn("propString", arrowArray: try stringBuilder.toHolder())
-            .addColumn("propDate", arrowArray: try dateBuilder.toHolder())
-            .finish()
-        switch result {
-        case .success(let rb):
-            let decoder = ArrowDecoder(rb)
-            let testClasses = try decoder.decode(TestClass.self)
-            for index in 0.. = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        int8Builder.append(10, 11, 12)
-        let result = RecordBatch.Builder()
-            .addColumn("propInt8", arrowArray: try int8Builder.toHolder())
-            .finish()
-        switch result {
-        case .success(let rb):
-            let decoder = ArrowDecoder(rb)
-            let testData = try decoder.decode(Int8?.self)
-            for index in 0.. = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        int8WNilBuilder.append(10, nil, 12, nil)
-        let resultWNil = RecordBatch.Builder()
-            .addColumn("propInt8", arrowArray: try int8WNilBuilder.toHolder())
-            .finish()
-        switch resultWNil {
-        case .success(let rb):
-            let decoder = ArrowDecoder(rb)
-            let testData = try decoder.decode(Int8?.self)
-            for index in 0.. = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
-        int8Builder.append(10, 11, 12, 13)
-        stringBuilder.append("test10", "test11", "test12", "test13")
-        switch RecordBatch.Builder()
-            .addColumn("propInt8", arrowArray: try int8Builder.toHolder())
-            .addColumn("propString", arrowArray: try stringBuilder.toHolder())
-            .finish() {
-        case .success(let rb):
-            let decoder = ArrowDecoder(rb)
-            let testData = try decoder.decode([Int8: String].self)
-            for data in testData {
-                XCTAssertEqual("test\(data.key)", data.value)
-            }
-        case .failure(let err):
-            throw err
-        }
-
-        switch RecordBatch.Builder()
-            .addColumn("propString", arrowArray: try stringBuilder.toHolder())
-            .addColumn("propInt8", arrowArray: try int8Builder.toHolder())
-            .finish() {
-        case .success(let rb):
-            let decoder = ArrowDecoder(rb)
-            let testData = try decoder.decode([String: Int8].self)
-            for data in testData {
-                XCTAssertEqual("test\(data.value)", data.key)
-            }
-        case .failure(let err):
-            throw err
-        }
-    }
-
-    func testArrowMapDecoderWithNull() throws {
-        let int8Builder: NumberArrayBuilder<Int8> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        let stringWNilBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
-        int8Builder.append(10, 11, 12, 13)
-        stringWNilBuilder.append(nil, "test11", nil, "test13")
-        let resultWNil = RecordBatch.Builder()
-            .addColumn("propInt8", arrowArray: try int8Builder.toHolder())
-            .addColumn("propString", arrowArray: try stringWNilBuilder.toHolder())
-            .finish()
-        switch resultWNil {
-        case .success(let rb):
-            let decoder = ArrowDecoder(rb)
-            let testData = try decoder.decode([Int8: String?].self)
-            for data in testData {
-                let str = data.value
-                if data.key % 2 == 0 {
-                    XCTAssertNil(str)
-                } else {
-                    XCTAssertEqual(str, "test\(data.key)")
-                }
-            }
-        case .failure(let err):
-            throw err
-        }
-    }
-
-    func getArrayValue<T>(_ rb: RecordBatch, colIndex: Int, rowIndex: UInt) -> T? {
-        let anyArray = rb.columns[colIndex].array
-        return anyArray.asAny(UInt(rowIndex)) as? T
-    }
-
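The decoder tests above all follow one shape: build a batch, wrap it in ArrowDecoder, decode into a Swift type. Condensed, assuming a batch rb with an Int8 column followed by a String column as in testArrowMapDecoder:

    let decoder = ArrowDecoder(rb)
    let pairs = try decoder.decode([Int8: String].self)  // column 0 supplies keys, column 1 values
    // Single-column batches decode to scalars; Optional targets surface nulls as nil:
    // let scalars = try decoder.decode(Int8?.self)
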
-    func testArrowKeyedEncoder() throws { // swiftlint:disable:this function_body_length
-        var infos = [TestClass]()
-        for index in 0..<10 {
-            let tClass = TestClass()
-            let offset = index * 12
-            tClass.propBool = index % 2 == 0
-            tClass.propInt8 = Int8(offset + 1)
-            tClass.propInt16 = Int16(offset + 2)
-            tClass.propInt32 = Int32(offset + 3)
-            tClass.propInt64 = Int64(offset + 4)
-            tClass.propUInt8 = UInt8(offset + 5)
-            tClass.propUInt16 = UInt16(offset + 6)
-            tClass.propUInt32 = UInt32(offset + 7)
-            tClass.propUInt64 = UInt64(offset + 8)
-            tClass.propFloat = Float(offset + 9)
-            tClass.propDouble = index % 2 == 0 ? Double(offset + 10) : nil
-            tClass.propString = "\(offset + 11)"
-            tClass.propDate = Date.now
-            infos.append(tClass)
-        }
-
-        let rb = try ArrowEncoder.encode(infos)!
-        XCTAssertEqual(Int(rb.length), infos.count)
-        XCTAssertEqual(rb.columns.count, 13)
-        XCTAssertEqual(rb.columns[0].type.id, ArrowTypeId.boolean)
-        XCTAssertEqual(rb.columns[1].type.id, ArrowTypeId.int8)
-        XCTAssertEqual(rb.columns[2].type.id, ArrowTypeId.int16)
-        XCTAssertEqual(rb.columns[3].type.id, ArrowTypeId.int32)
-        XCTAssertEqual(rb.columns[4].type.id, ArrowTypeId.int64)
-        XCTAssertEqual(rb.columns[5].type.id, ArrowTypeId.uint8)
-        XCTAssertEqual(rb.columns[6].type.id, ArrowTypeId.uint16)
-        XCTAssertEqual(rb.columns[7].type.id, ArrowTypeId.uint32)
-        XCTAssertEqual(rb.columns[8].type.id, ArrowTypeId.uint64)
-        XCTAssertEqual(rb.columns[9].type.id, ArrowTypeId.float)
-        XCTAssertEqual(rb.columns[10].type.id, ArrowTypeId.double)
-        XCTAssertEqual(rb.columns[11].type.id, ArrowTypeId.string)
-        XCTAssertEqual(rb.columns[12].type.id, ArrowTypeId.date64)
-        for index in 0..<10 {
-            let offset = index * 12
-            XCTAssertEqual(getArrayValue(rb, colIndex: 0, rowIndex: UInt(index)), index % 2 == 0)
-            XCTAssertEqual(getArrayValue(rb, colIndex: 1, rowIndex: UInt(index)), Int8(offset + 1))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 2, rowIndex: UInt(index)), Int16(offset + 2))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 3, rowIndex: UInt(index)), Int32(offset + 3))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 4, rowIndex: UInt(index)), Int64(offset + 4))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 5, rowIndex: UInt(index)), UInt8(offset + 5))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 6, rowIndex: UInt(index)), UInt16(offset + 6))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 7, rowIndex: UInt(index)), UInt32(offset + 7))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 8, rowIndex: UInt(index)), UInt64(offset + 8))
-            XCTAssertEqual(getArrayValue(rb, colIndex: 9, rowIndex: UInt(index)), Float(offset + 9))
-            if index % 2 == 0 {
-                XCTAssertEqual(getArrayValue(rb, colIndex: 10, rowIndex: UInt(index)), Double(offset + 10))
-            } else {
-                XCTAssertEqual(getArrayValue(rb, colIndex: 10, rowIndex: UInt(index)), Double?(nil))
-            }
-
-            XCTAssertEqual(getArrayValue(rb, colIndex: 11, rowIndex: UInt(index)), String(offset + 11))
-        }
-    }
-
-    func testArrowUnkeyedEncoder() throws {
-        var testMap = [Int8: String?]()
-        for index in 0..<10 {
-            testMap[Int8(index)] = "test\(index)"
-        }
-
-        let rb = try ArrowEncoder.encode(testMap)
-        XCTAssertEqual(Int(rb.length), testMap.count)
-        XCTAssertEqual(rb.columns.count, 2)
-        XCTAssertEqual(rb.columns[0].type.id, ArrowTypeId.int8)
-        XCTAssertEqual(rb.columns[1].type.id, ArrowTypeId.string)
-        for index in 0..<10 {
-            let key: Int8 = getArrayValue(rb, colIndex: 0, rowIndex: UInt(index))!
-            let value: String = getArrayValue(rb, colIndex: 1, rowIndex: UInt(index))!
-            XCTAssertEqual("test\(key)", value)
-        }
-    }
-
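ArrowEncoder is the inverse of the decoder: it derives columns from Encodable values. Mirroring the three tests in this file (keyed object, dictionary, scalar array), with the calls copied from them:

    let fromObjects = try ArrowEncoder.encode(infos)!     // one column per stored property
    let fromMap = try ArrowEncoder.encode(testMap)        // key column plus value column
    let fromScalars = try ArrowEncoder.encode(intArray)!  // a single column; nil becomes a null
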
-    func testArrowSingleEncoder() throws {
-        var intArray = [Int32?]()
-        for index in 0..<100 {
-            if index == 10 {
-                intArray.append(nil)
-            } else {
-                intArray.append(Int32(index))
-            }
-        }
-
-        let rb = try ArrowEncoder.encode(intArray)!
-        XCTAssertEqual(Int(rb.length), intArray.count)
-        XCTAssertEqual(rb.columns.count, 1)
-        XCTAssertEqual(rb.columns[0].type.id, ArrowTypeId.int32)
-        for index in 0..<100 {
-            if index == 10 {
-                let anyArray = rb.columns[0].array
-                XCTAssertNil(anyArray.asAny(UInt(index)))
-            } else {
-                XCTAssertEqual(getArrayValue(rb, colIndex: 0, rowIndex: UInt(index)), Int32(index))
-            }
-        }
-    }
-}
diff --git a/swift/Arrow/Tests/ArrowTests/IPCTests.swift b/swift/Arrow/Tests/ArrowTests/IPCTests.swift
deleted file mode 100644
index 26f38ce4e07..00000000000
--- a/swift/Arrow/Tests/ArrowTests/IPCTests.swift
+++ /dev/null
@@ -1,616 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import XCTest
-import FlatBuffers
-@testable import Arrow
-
-let currentDate = Date.now
-class StructTest {
-    var field0: Bool = false
-    var field1: Int8 = 0
-    var field2: Int16 = 0
-    var field3: Int32 = 0
-    var field4: Int64 = 0
-    var field5: UInt8 = 0
-    var field6: UInt16 = 0
-    var field7: UInt32 = 0
-    var field8: UInt64 = 0
-    var field9: Double = 0
-    var field10: Float = 0
-    var field11: String = ""
-    var field12 = Data()
-    var field13: Date = currentDate
-}
-
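Every test in this file unwraps Result values with an explicit switch. An equivalent generic helper (not present in the deleted code; Swift's built-in result.get() behaves the same way) would be:

    // Hypothetical helper standing in for the repeated switch blocks below.
    func unwrap<T, E: Error>(_ result: Result<T, E>) throws -> T {
        switch result {
        case .success(let value): return value
        case .failure(let error): throw error
        }
    }
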
-@discardableResult
-func checkBoolRecordBatch(_ result: Result) throws -> [RecordBatch] {
-    let recordBatches: [RecordBatch]
-    switch result {
-    case .success(let result):
-        recordBatches = result.batches
-    case .failure(let error):
-        throw error
-    }
-
-    XCTAssertEqual(recordBatches.count, 1)
-    for recordBatch in recordBatches {
-        XCTAssertEqual(recordBatch.length, 5)
-        XCTAssertEqual(recordBatch.columns.count, 2)
-        XCTAssertEqual(recordBatch.schema.fields.count, 2)
-        XCTAssertEqual(recordBatch.schema.fields[0].name, "one")
-        XCTAssertEqual(recordBatch.schema.fields[0].type.info, ArrowType.ArrowBool)
-        XCTAssertEqual(recordBatch.schema.fields[1].name, "two")
-        XCTAssertEqual(recordBatch.schema.fields[1].type.info, ArrowType.ArrowString)
-        for index in 0..) throws -> [RecordBatch] {
-    let recordBatches: [RecordBatch]
-    switch result {
-    case .success(let result):
-        recordBatches = result.batches
-    case .failure(let error):
-        throw error
-    }
-
-    XCTAssertEqual(recordBatches.count, 1)
-    for recordBatch in recordBatches {
-        XCTAssertEqual(recordBatch.length, 3)
-        XCTAssertEqual(recordBatch.columns.count, 1)
-        XCTAssertEqual(recordBatch.schema.fields.count, 1)
-        XCTAssertEqual(recordBatch.schema.fields[0].name, "my struct")
-        XCTAssertEqual(recordBatch.schema.fields[0].type.id, .strct)
-        let structArray = recordBatch.columns[0].array as? StructArray
-        XCTAssertEqual(structArray!.arrowFields!.count, 2)
-        XCTAssertEqual(structArray!.arrowFields![0].type.id, .string)
-        XCTAssertEqual(structArray!.arrowFields![1].type.id, .boolean)
-        let column = recordBatch.columns[0]
-        let str = column.array as? AsString
-        XCTAssertEqual("\(str!.asString(0))", "{0,false}")
-        XCTAssertEqual("\(str!.asString(1))", "{1,true}")
-        XCTAssertTrue(column.array.asAny(2) == nil)
-    }
-
-    return recordBatches
-}
-
-func currentDirectory(path: String = #file) -> URL {
-    return URL(fileURLWithPath: path).deletingLastPathComponent()
-}
-
-func makeSchema() -> ArrowSchema {
-    let schemaBuilder = ArrowSchema.Builder()
-    return schemaBuilder.addField("col1", type: ArrowType(ArrowType.ArrowUInt8), isNullable: true)
-        .addField("col2", type: ArrowType(ArrowType.ArrowString), isNullable: false)
-        .addField("col3", type: ArrowType(ArrowType.ArrowDate32), isNullable: false)
-        .addField("col4", type: ArrowType(ArrowType.ArrowInt32), isNullable: false)
-        .addField("col5", type: ArrowType(ArrowType.ArrowFloat), isNullable: false)
-        .finish()
-}
-
-func makeStructSchema() -> ArrowSchema {
-    let testObj = StructTest()
-    var fields = [ArrowField]()
-    let buildStructType = {() -> ArrowNestedType in
-        let mirror = Mirror(reflecting: testObj)
-        for (property, value) in mirror.children {
-            let arrowType = ArrowType(ArrowType.infoForType(type(of: value)))
-            fields.append(ArrowField(property!, type: arrowType, isNullable: true))
-        }
-
-        return ArrowNestedType(ArrowType.ArrowStruct, fields: fields)
-    }
-
-    return ArrowSchema.Builder()
-        .addField("struct1", type: buildStructType(), isNullable: true)
-        .finish()
-}
-
-func makeStructRecordBatch() throws -> RecordBatch {
-    let testData = StructTest()
-    let dateNow = Date.now
-    let structBuilder = try ArrowArrayBuilders.loadStructArrayBuilderForType(testData)
-    structBuilder.append([true, Int8(1), Int16(2), Int32(3), Int64(4),
-                          UInt8(5), UInt16(6), UInt32(7), UInt64(8), Double(9.9),
-                          Float(10.10), "11", Data("12".utf8), dateNow])
-    structBuilder.append(nil)
-    structBuilder.append([true, Int8(13), Int16(14), Int32(15), Int64(16),
-                          UInt8(17), UInt16(18), UInt32(19), UInt64(20), Double(21.21),
-                          Float(22.22), "23", Data("24".utf8), dateNow])
-    let structHolder = ArrowArrayHolderImpl(try structBuilder.finish())
-    let result = RecordBatch.Builder()
-        .addColumn("struct1", arrowArray: structHolder)
-        .finish()
-    switch result {
-    case .success(let recordBatch):
-        return recordBatch
-    case .failure(let error):
-        throw error
-    }
-}
-
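makeStructSchema above derives the nested type by reflection. The same Mirror pattern on a smaller, hypothetical type (Point is illustrative only):

    struct Point { var x: Double = 0.0; var y: Double = 0.0 }
    var fields = [ArrowField]()
    for (name, value) in Mirror(reflecting: Point()).children {
        // infoForType maps each Swift stored-property type to an Arrow type info.
        let arrowType = ArrowType(ArrowType.infoForType(type(of: value)))
        fields.append(ArrowField(name!, type: arrowType, isNullable: true))
    }
    let structType = ArrowNestedType(ArrowType.ArrowStruct, fields: fields)  // two fields: x, y
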
-func makeRecordBatch() throws -> RecordBatch {
-    let uint8Builder: NumberArrayBuilder<UInt8> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-    uint8Builder.append(10)
-    uint8Builder.append(nil)
-    uint8Builder.append(nil)
-    uint8Builder.append(44)
-    let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
-    stringBuilder.append("test10")
-    stringBuilder.append("test22")
-    stringBuilder.append("test33")
-    stringBuilder.append("test44")
-    let date32Builder = try ArrowArrayBuilders.loadDate32ArrayBuilder()
-    let date2 = Date(timeIntervalSinceReferenceDate: 86400 * 1)
-    let date1 = Date(timeIntervalSinceReferenceDate: 86400 * 5000 + 352)
-    date32Builder.append(date1)
-    date32Builder.append(date2)
-    date32Builder.append(date1)
-    date32Builder.append(date2)
-    let int32Builder: NumberArrayBuilder<Int32> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-    int32Builder.append(1)
-    int32Builder.append(2)
-    int32Builder.append(3)
-    int32Builder.append(4)
-    let floatBuilder: NumberArrayBuilder<Float> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-    floatBuilder.append(211.112)
-    floatBuilder.append(322.223)
-    floatBuilder.append(433.334)
-    floatBuilder.append(544.445)
-
-    let uint8Holder = ArrowArrayHolderImpl(try uint8Builder.finish())
-    let stringHolder = ArrowArrayHolderImpl(try stringBuilder.finish())
-    let date32Holder = ArrowArrayHolderImpl(try date32Builder.finish())
-    let int32Holder = ArrowArrayHolderImpl(try int32Builder.finish())
-    let floatHolder = ArrowArrayHolderImpl(try floatBuilder.finish())
-    let result = RecordBatch.Builder()
-        .addColumn("col1", arrowArray: uint8Holder)
-        .addColumn("col2", arrowArray: stringHolder)
-        .addColumn("col3", arrowArray: date32Holder)
-        .addColumn("col4", arrowArray: int32Holder)
-        .addColumn("col5", arrowArray: floatHolder)
-        .finish()
-    switch result {
-    case .success(let recordBatch):
-        return recordBatch
-    case .failure(let error):
-        throw error
-    }
-}
-
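The stream-reader test below writes the batch with ArrowWriter and reads it back with ArrowReader. Stripped of the assertions, and assuming the same API, the round trip is:

    let info = ArrowWriter.Info(.recordbatch, schema: makeSchema(), batches: [try makeRecordBatch()])
    if case .success(let bytes) = ArrowWriter().writeStreaming(info),
       case .success(let result) = ArrowReader().readStreaming(bytes) {
        assert(result.batches.count == 1)
    }
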
-final class IPCStreamReaderTests: XCTestCase {
-    func testRBInMemoryToFromStream() throws {
-        let schema = makeSchema()
-        let recordBatch = try makeRecordBatch()
-        let arrowWriter = ArrowWriter()
-        let writerInfo = ArrowWriter.Info(.recordbatch, schema: schema, batches: [recordBatch])
-        switch arrowWriter.writeStreaming(writerInfo) {
-        case .success(let writeData):
-            let arrowReader = ArrowReader()
-            switch arrowReader.readStreaming(writeData) {
-            case .success(let result):
-                let recordBatches = result.batches
-                XCTAssertEqual(recordBatches.count, 1)
-                for recordBatch in recordBatches {
-                    XCTAssertEqual(recordBatch.length, 4)
-                    XCTAssertEqual(recordBatch.columns.count, 5)
-                    XCTAssertEqual(recordBatch.schema.fields.count, 5)
-                    XCTAssertEqual(recordBatch.schema.fields[0].name, "col1")
-                    XCTAssertEqual(recordBatch.schema.fields[0].type.info, ArrowType.ArrowUInt8)
-                    XCTAssertEqual(recordBatch.schema.fields[1].name, "col2")
-                    XCTAssertEqual(recordBatch.schema.fields[1].type.info, ArrowType.ArrowString)
-                    XCTAssertEqual(recordBatch.schema.fields[2].name, "col3")
-                    XCTAssertEqual(recordBatch.schema.fields[2].type.info, ArrowType.ArrowDate32)
-                    XCTAssertEqual(recordBatch.schema.fields[3].name, "col4")
-                    XCTAssertEqual(recordBatch.schema.fields[3].type.info, ArrowType.ArrowInt32)
-                    XCTAssertEqual(recordBatch.schema.fields[4].name, "col5")
-                    XCTAssertEqual(recordBatch.schema.fields[4].type.info, ArrowType.ArrowFloat)
-                    let columns = recordBatch.columns
-                    XCTAssertEqual(columns[0].nullCount, 2)
-                    let dateVal =
-                        "\((columns[2].array as! AsString).asString(0))" // swiftlint:disable:this force_cast
-                    XCTAssertEqual(dateVal, "2014-09-10 00:00:00 +0000")
-                    let stringVal =
-                        "\((columns[1].array as! AsString).asString(1))" // swiftlint:disable:this force_cast
-                    XCTAssertEqual(stringVal, "test22")
-                    let uintVal =
-                        "\((columns[0].array as! AsString).asString(0))" // swiftlint:disable:this force_cast
-                    XCTAssertEqual(uintVal, "10")
-                    let stringVal2 =
-                        "\((columns[1].array as! AsString).asString(3))" // swiftlint:disable:this force_cast
-                    XCTAssertEqual(stringVal2, "test44")
-                    let uintVal2 =
-                        "\((columns[0].array as! AsString).asString(3))" // swiftlint:disable:this force_cast
-                    XCTAssertEqual(uintVal2, "44")
-                }
-            case .failure(let error):
-                throw error
-            }
-        case .failure(let error):
-            throw error
-        }
-    }
-}
-
-final class IPCFileReaderTests: XCTestCase { // swiftlint:disable:this type_body_length
-    func testFileReader_double() throws {
-        let fileURL = currentDirectory().appendingPathComponent("../../testdata_double.arrow")
-        let arrowReader = ArrowReader()
-        let result = arrowReader.fromFile(fileURL)
-        let recordBatches: [RecordBatch]
-        switch result {
-        case .success(let result):
-            recordBatches = result.batches
-        case .failure(let error):
-            throw error
-        }
-
-        XCTAssertEqual(recordBatches.count, 1)
-        for recordBatch in recordBatches {
-            XCTAssertEqual(recordBatch.length, 5)
-            XCTAssertEqual(recordBatch.columns.count, 2)
-            XCTAssertEqual(recordBatch.schema.fields.count, 2)
-            XCTAssertEqual(recordBatch.schema.fields[0].name, "one")
-            XCTAssertEqual(recordBatch.schema.fields[0].type.info, ArrowType.ArrowDouble)
-            XCTAssertEqual(recordBatch.schema.fields[1].name, "two")
-            XCTAssertEqual(recordBatch.schema.fields[1].type.info, ArrowType.ArrowString)
-            for index in 0.. (ArrowSchema, RecordBatch) {
-        let schemaBuilder = ArrowSchema.Builder()
-        let schema = schemaBuilder.addField("binary", type: ArrowType(ArrowType.ArrowBinary), isNullable: false)
-            .finish()
-
-        let binaryBuilder = try ArrowArrayBuilders.loadBinaryArrayBuilder()
-        binaryBuilder.append("test10".data(using: .utf8))
-        binaryBuilder.append("test22".data(using: .utf8))
-        binaryBuilder.append("test33".data(using: .utf8))
-        binaryBuilder.append("test44".data(using: .utf8))
-
-        let binaryHolder = ArrowArrayHolderImpl(try binaryBuilder.finish())
-        let result = RecordBatch.Builder()
-            .addColumn("binary", arrowArray: binaryHolder)
-            .finish()
-        switch result {
-        case .success(let recordBatch):
-            return (schema, recordBatch)
-        case .failure(let error):
-            throw error
-        }
-    }
-
-    func makeTimeDataset() throws -> (ArrowSchema, RecordBatch) {
-        let schemaBuilder = ArrowSchema.Builder()
-        let schema = schemaBuilder.addField("time64", type: ArrowTypeTime64(.microseconds), isNullable: false)
-            .addField("time32", type: ArrowTypeTime32(.milliseconds), isNullable: false)
-            .finish()
-
-        let time64Builder = try ArrowArrayBuilders.loadTime64ArrayBuilder(.nanoseconds)
-        time64Builder.append(12345678)
-        time64Builder.append(1)
-        time64Builder.append(nil)
-        time64Builder.append(98765432)
-        let time32Builder = try ArrowArrayBuilders.loadTime32ArrayBuilder(.milliseconds)
-        time32Builder.append(1)
-        time32Builder.append(2)
-        time32Builder.append(nil)
-        time32Builder.append(3)
-        let time64Holder = ArrowArrayHolderImpl(try time64Builder.finish())
-        let time32Holder = ArrowArrayHolderImpl(try time32Builder.finish())
-        let result = RecordBatch.Builder()
-            .addColumn("time64", arrowArray: time64Holder)
-            .addColumn("time32", arrowArray: time32Holder)
-            .finish()
-        switch result {
-        case .success(let recordBatch):
-            return (schema, recordBatch)
-        case .failure(let error):
-            throw error
-        }
-    }
-
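makeTimeDataset above pairs unit-carrying builders with unit-carrying field types; note it declares the time64 field as .microseconds while appending through a .nanoseconds builder. In isolation, under the same API:

    let time64Builder = try ArrowArrayBuilders.loadTime64ArrayBuilder(.nanoseconds)
    time64Builder.append(12345678)
    time64Builder.append(nil)  // nulls are appended as nil
    let time32Builder = try ArrowArrayBuilders.loadTime32ArrayBuilder(.milliseconds)
    time32Builder.append(1)
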
-    func testStructRBInMemoryToFromStream() throws {
-        // read existing file
-        let schema = makeStructSchema()
-        let recordBatch = try makeStructRecordBatch()
-        let arrowWriter = ArrowWriter()
-        let writerInfo = ArrowWriter.Info(.recordbatch, schema: schema, batches: [recordBatch])
-        switch arrowWriter.writeStreaming(writerInfo) {
-        case .success(let writeData):
-            let arrowReader = ArrowReader()
-            switch arrowReader.readStreaming(writeData) {
-            case .success(let result):
-                let recordBatches = result.batches
-                XCTAssertEqual(recordBatches.count, 1)
-                for recordBatch in recordBatches {
-                    XCTAssertEqual(recordBatch.length, 3)
-                    XCTAssertEqual(recordBatch.columns.count, 1)
-                    XCTAssertEqual(recordBatch.schema.fields.count, 1)
-                    XCTAssertEqual(recordBatch.schema.fields[0].name, "struct1")
-                    XCTAssertEqual(recordBatch.schema.fields[0].type.id, .strct)
-                    XCTAssertTrue(recordBatch.schema.fields[0].type is ArrowNestedType)
-                    let nestedType = (recordBatch.schema.fields[0].type as? ArrowNestedType)!
-                    XCTAssertEqual(nestedType.fields.count, 14)
-                    let columns = recordBatch.columns
-                    XCTAssertEqual(columns[0].nullCount, 1)
-                    XCTAssertNil(columns[0].array.asAny(1))
-                    let structVal =
-                        "\((columns[0].array as? AsString)!.asString(0))"
-                    XCTAssertEqual(structVal, "{true,1,2,3,4,5,6,7,8,9.9,10.1,11,12,\(currentDate)}")
-                    let structArray = (recordBatch.columns[0].array as? StructArray)!
-                    XCTAssertEqual(structArray.length, 3)
-                    XCTAssertEqual(structArray.arrowFields!.count, 14)
-                    XCTAssertEqual(structArray.arrowFields![0].type.id, .boolean)
-                    XCTAssertEqual(structArray.arrowFields![1].type.id, .int8)
-                    XCTAssertEqual(structArray.arrowFields![2].type.id, .int16)
-                    XCTAssertEqual(structArray.arrowFields![3].type.id, .int32)
-                    XCTAssertEqual(structArray.arrowFields![4].type.id, .int64)
-                    XCTAssertEqual(structArray.arrowFields![5].type.id, .uint8)
-                    XCTAssertEqual(structArray.arrowFields![6].type.id, .uint16)
-                    XCTAssertEqual(structArray.arrowFields![7].type.id, .uint32)
-                    XCTAssertEqual(structArray.arrowFields![8].type.id, .uint64)
-                    XCTAssertEqual(structArray.arrowFields![9].type.id, .double)
-                    XCTAssertEqual(structArray.arrowFields![10].type.id, .float)
-                    XCTAssertEqual(structArray.arrowFields![11].type.id, .string)
-                    XCTAssertEqual(structArray.arrowFields![12].type.id, .binary)
-                    XCTAssertEqual(structArray.arrowFields![13].type.id, .date64)
-                }
-            case .failure(let error):
-                throw error
-            }
-        case .failure(let error):
-            throw error
-        }
-    }
-
-    func testBinaryInMemoryToFromStream() throws {
-        let dataset = try makeBinaryDataset()
-        let writerInfo = ArrowWriter.Info(.recordbatch, schema: dataset.0, batches: [dataset.1])
-        let arrowWriter = ArrowWriter()
-        switch arrowWriter.writeFile(writerInfo) {
-        case .success(let writeData):
-            let arrowReader = ArrowReader()
-            switch arrowReader.readFile(writeData) {
-            case .success(let result):
-                XCTAssertNotNil(result.schema)
-                let schema = result.schema!
-                XCTAssertEqual(schema.fields.count, 1)
-                XCTAssertEqual(schema.fields[0].name, "binary")
-                XCTAssertEqual(schema.fields[0].type.info, ArrowType.ArrowBinary)
-                XCTAssertEqual(result.batches.count, 1)
-                let recordBatch = result.batches[0]
-                XCTAssertEqual(recordBatch.length, 4)
-                let columns = recordBatch.columns
-                let stringVal =
-                    "\((columns[0].array as! AsString).asString(1))" // swiftlint:disable:this force_cast
-                XCTAssertEqual(stringVal, "test22")
-            case .failure(let error):
-                throw error
-            }
-        case .failure(let error):
-            throw error
-        }
-    }
-
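The binary and time tests use the Arrow file format rather than the stream format: writeFile instead of writeStreaming, readFile instead of readStreaming. The skeleton, under the same API:

    let info = ArrowWriter.Info(.recordbatch, schema: dataset.0, batches: [dataset.1])
    if case .success(let data) = ArrowWriter().writeFile(info),
       case .success(let result) = ArrowReader().readFile(data) {
        assert(result.schema != nil && result.batches.count == 1)
    }
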
-    func testTimeInMemoryToFromStream() throws {
-        let dataset = try makeTimeDataset()
-        let writerInfo = ArrowWriter.Info(.recordbatch, schema: dataset.0, batches: [dataset.1])
-        let arrowWriter = ArrowWriter()
-        switch arrowWriter.writeFile(writerInfo) {
-        case .success(let writeData):
-            let arrowReader = ArrowReader()
-            switch arrowReader.readFile(writeData) {
-            case .success(let result):
-                XCTAssertNotNil(result.schema)
-                let schema = result.schema!
-                XCTAssertEqual(schema.fields.count, 2)
-                XCTAssertEqual(schema.fields[0].name, "time64")
-                XCTAssertEqual(schema.fields[0].type.info, ArrowType.ArrowTime64)
-                XCTAssertEqual(schema.fields[1].name, "time32")
-                XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowTime32)
-                XCTAssertEqual(result.batches.count, 1)
-                let recordBatch = result.batches[0]
-                XCTAssertEqual(recordBatch.length, 4)
-                let columns = recordBatch.columns
-                let stringVal =
-                    "\((columns[0].array as! AsString).asString(0))" // swiftlint:disable:this force_cast
-                XCTAssertEqual(stringVal, "12345678")
-                let stringVal2 =
-                    "\((columns[1].array as! AsString).asString(3))" // swiftlint:disable:this force_cast
-                XCTAssertEqual(stringVal2, "3")
-            case .failure(let error):
-                throw error
-            }
-        case .failure(let error):
-            throw error
-        }
-    }
-}
-// swiftlint:disable:this file_length
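Throughout these IPC tests, values are asserted through the AsString protocol rather than typed accessors; any supported column can be rendered row by row. A hypothetical inspection helper over the same cast:

    if let printable = recordBatch.columns[0].array as? AsString {
        print(printable.asString(0))  // e.g. "10" for a UInt8 column
    }
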
diff --git a/swift/Arrow/Tests/ArrowTests/RecordBatchTests.swift b/swift/Arrow/Tests/ArrowTests/RecordBatchTests.swift
deleted file mode 100644
index 9961781f308..00000000000
--- a/swift/Arrow/Tests/ArrowTests/RecordBatchTests.swift
+++ /dev/null
@@ -1,58 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import XCTest
-@testable import Arrow
-
-final class RecordBatchTests: XCTestCase {
-    func testRecordBatch() throws {
-        let uint8Builder: NumberArrayBuilder<UInt8> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        uint8Builder.append(10)
-        uint8Builder.append(22)
-        uint8Builder.append(nil)
-        let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
-        stringBuilder.append("test10")
-        stringBuilder.append("test22")
-        stringBuilder.append("test33")
-
-        let intHolder = ArrowArrayHolderImpl(try uint8Builder.finish())
-        let stringHolder = ArrowArrayHolderImpl(try stringBuilder.finish())
-        let result = RecordBatch.Builder()
-            .addColumn("col1", arrowArray: intHolder)
-            .addColumn("col2", arrowArray: stringHolder)
-            .finish()
-        switch result {
-        case .success(let recordBatch):
-            let schema = recordBatch.schema
-            XCTAssertEqual(schema.fields.count, 2)
-            XCTAssertEqual(schema.fields[0].name, "col1")
-            XCTAssertEqual(schema.fields[0].type.info, ArrowType.ArrowUInt8)
-            XCTAssertEqual(schema.fields[0].isNullable, true)
-            XCTAssertEqual(schema.fields[1].name, "col2")
-            XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowString)
-            XCTAssertEqual(schema.fields[1].isNullable, false)
-            XCTAssertEqual(recordBatch.columns.count, 2)
-            let col1: ArrowArray<UInt8> = recordBatch.data(for: 0)
-            let col2: ArrowArray<String> = recordBatch.data(for: 1)
-            XCTAssertEqual(col1.length, 3)
-            XCTAssertEqual(col2.length, 3)
-            XCTAssertEqual(col1.nullCount, 1)
-        case .failure(let error):
-            throw error
-        }
-    }
-}
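The typed accessor used above, data(for:), infers the concrete array type from the annotation. A two-line sketch against the batch built in this test:

    let col1: ArrowArray<UInt8> = recordBatch.data(for: 0)   // nullable uint8 column
    let col2: ArrowArray<String> = recordBatch.data(for: 1)  // string column
    // col1.nullCount == 1 here, because one nil was appended to uint8Builder.
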
diff --git a/swift/Arrow/Tests/ArrowTests/TableTests.swift b/swift/Arrow/Tests/ArrowTests/TableTests.swift
deleted file mode 100644
index dc5cabcd652..00000000000
--- a/swift/Arrow/Tests/ArrowTests/TableTests.swift
+++ /dev/null
@@ -1,211 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import XCTest
-@testable import Arrow
-
-final class TableTests: XCTestCase {
-    func testSchema() throws {
-        let schemaBuilder = ArrowSchema.Builder()
-        let schema = schemaBuilder.addField("col1", type: ArrowType(ArrowType.ArrowInt8), isNullable: true)
-            .addField("col2", type: ArrowType(ArrowType.ArrowBool), isNullable: false)
-            .finish()
-        XCTAssertEqual(schema.fields.count, 2)
-        XCTAssertEqual(schema.fields[0].name, "col1")
-        XCTAssertEqual(schema.fields[0].type.info, ArrowType.ArrowInt8)
-        XCTAssertEqual(schema.fields[0].isNullable, true)
-        XCTAssertEqual(schema.fields[1].name, "col2")
-        XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowBool)
-        XCTAssertEqual(schema.fields[1].isNullable, false)
-    }
-
-    func testSchemaNested() {
-        class StructTest {
-            var field0: Bool = false
-            var field1: Int8 = 0
-            var field2: Int16 = 0
-            var field3: Int32 = 0
-            var field4: Int64 = 0
-            var field5: UInt8 = 0
-            var field6: UInt16 = 0
-            var field7: UInt32 = 0
-            var field8: UInt64 = 0
-            var field9: Double = 0
-            var field10: Float = 0
-            var field11: String = ""
-            var field12 = Data()
-            var field13: Date = Date.now
-        }
-
-        let testObj = StructTest()
-        var fields = [ArrowField]()
-        let buildStructType = {() -> ArrowNestedType in
-            let mirror = Mirror(reflecting: testObj)
-            for (property, value) in mirror.children {
-                let arrowType = ArrowType(ArrowType.infoForType(type(of: value)))
-                fields.append(ArrowField(property!, type: arrowType, isNullable: true))
-            }
-
-            return ArrowNestedType(ArrowType.ArrowStruct, fields: fields)
-        }
-
-        let structType = buildStructType()
-        XCTAssertEqual(structType.id, ArrowTypeId.strct)
-        XCTAssertEqual(structType.fields.count, 14)
-        XCTAssertEqual(structType.fields[0].type.id, ArrowTypeId.boolean)
-        XCTAssertEqual(structType.fields[1].type.id, ArrowTypeId.int8)
-        XCTAssertEqual(structType.fields[2].type.id, ArrowTypeId.int16)
-        XCTAssertEqual(structType.fields[3].type.id, ArrowTypeId.int32)
-        XCTAssertEqual(structType.fields[4].type.id, ArrowTypeId.int64)
-        XCTAssertEqual(structType.fields[5].type.id, ArrowTypeId.uint8)
-        XCTAssertEqual(structType.fields[6].type.id, ArrowTypeId.uint16)
-        XCTAssertEqual(structType.fields[7].type.id, ArrowTypeId.uint32)
-        XCTAssertEqual(structType.fields[8].type.id, ArrowTypeId.uint64)
-        XCTAssertEqual(structType.fields[9].type.id, ArrowTypeId.double)
-        XCTAssertEqual(structType.fields[10].type.id, ArrowTypeId.float)
-        XCTAssertEqual(structType.fields[11].type.id, ArrowTypeId.string)
-        XCTAssertEqual(structType.fields[12].type.id, ArrowTypeId.binary)
-        XCTAssertEqual(structType.fields[13].type.id, ArrowTypeId.date64)
-    }
-
"col1") - XCTAssertEqual(schema.fields[0].type.info, ArrowType.ArrowDouble) - XCTAssertEqual(schema.fields[0].isNullable, false) - XCTAssertEqual(schema.fields[1].name, "col2") - XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowString) - XCTAssertEqual(schema.fields[1].isNullable, false) - XCTAssertEqual(schema.fields[1].name, "col2") - XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowString) - XCTAssertEqual(schema.fields[1].isNullable, false) - XCTAssertEqual(table.columns.count, 3) - let col1: ChunkedArray = table.columns[0].data() - let col2: ChunkedArray = table.columns[1].data() - let col3: ChunkedArray = table.columns[2].data() - XCTAssertEqual(col1.length, 2) - XCTAssertEqual(col2.length, 2) - XCTAssertEqual(col3.length, 2) - XCTAssertEqual(col1[0], 11.11) - XCTAssertEqual(col2[1], "test22") - } - - func testTableWithChunkedData() throws { - let uint8Builder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - uint8Builder.append(10) - uint8Builder.append(22) - let uint8Builder2: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - uint8Builder2.append(33) - let uint8Builder3: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - uint8Builder3.append(44) - let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder() - stringBuilder.append("test10") - stringBuilder.append("test22") - let stringBuilder2 = try ArrowArrayBuilders.loadStringArrayBuilder() - stringBuilder.append("test33") - stringBuilder.append("test44") - let date32Builder: Date32ArrayBuilder = try ArrowArrayBuilders.loadDate32ArrayBuilder() - let date2 = Date(timeIntervalSinceReferenceDate: 86400 * 1) - let date1 = Date(timeIntervalSinceReferenceDate: 86400 * 5000 + 352) - date32Builder.append(date1) - date32Builder.append(date2) - date32Builder.append(date1) - date32Builder.append(date2) - let intArray = try ChunkedArray([uint8Builder.finish(), uint8Builder2.finish(), uint8Builder3.finish()]) - let stringArray = try ChunkedArray([stringBuilder.finish(), stringBuilder2.finish()]) - let dateArray = try ChunkedArray([date32Builder.finish()]) - let table = ArrowTable.Builder() - .addColumn("col1", chunked: intArray) - .addColumn("col2", chunked: stringArray) - .addColumn("col3", chunked: dateArray) - .finish() - let schema = table.schema - XCTAssertEqual(schema.fields.count, 3) - XCTAssertEqual(schema.fields[0].name, "col1") - XCTAssertEqual(schema.fields[0].type.info, ArrowType.ArrowUInt8) - XCTAssertEqual(schema.fields[0].isNullable, false) - XCTAssertEqual(schema.fields[1].name, "col2") - XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowString) - XCTAssertEqual(schema.fields[1].isNullable, false) - XCTAssertEqual(schema.fields[1].name, "col2") - XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowString) - XCTAssertEqual(schema.fields[1].isNullable, false) - XCTAssertEqual(table.columns.count, 3) - let col1: ChunkedArray = table.columns[0].data() - let col2: ChunkedArray = table.columns[1].data() - let col3: ChunkedArray = table.columns[2].data() - XCTAssertEqual(col1.length, 4) - XCTAssertEqual(col2.length, 4) - XCTAssertEqual(col3.length, 4) - XCTAssertEqual(col1.asString(0), "10") - XCTAssertEqual(col1.asString(3), "44") - XCTAssertEqual(col2.asString(0), "test10") - XCTAssertEqual(col2.asString(2), "test33") - } - - func testTableToRecordBatch() throws { - let uint8Builder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - uint8Builder.append(10) - uint8Builder.append(22) - let stringBuilder = try 
-    func testTableToRecordBatch() throws {
-        let uint8Builder: NumberArrayBuilder<UInt8> = try ArrowArrayBuilders.loadNumberArrayBuilder()
-        uint8Builder.append(10)
-        uint8Builder.append(22)
-        let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder()
-        stringBuilder.append("test10")
-        stringBuilder.append("test22")
-        let intHolder = ArrowArrayHolderImpl(try uint8Builder.finish())
-        let stringHolder = ArrowArrayHolderImpl(try stringBuilder.finish())
-        let result = RecordBatch.Builder()
-            .addColumn("col1", arrowArray: intHolder)
-            .addColumn("col2", arrowArray: stringHolder)
-            .finish().flatMap({ rb in
-                return ArrowTable.from(recordBatches: [rb])
-            })
-        switch result {
-        case .success(let table):
-            let schema = table.schema
-            XCTAssertEqual(schema.fields.count, 2)
-            XCTAssertEqual(schema.fields[0].name, "col1")
-            XCTAssertEqual(schema.fields[0].type.info, ArrowType.ArrowUInt8)
-            XCTAssertEqual(schema.fields[0].isNullable, false)
-            XCTAssertEqual(schema.fields[1].name, "col2")
-            XCTAssertEqual(schema.fields[1].type.info, ArrowType.ArrowString)
-            XCTAssertEqual(schema.fields[1].isNullable, false)
-            XCTAssertEqual(table.columns.count, 2)
-            let col1: ChunkedArray<UInt8> = table.columns[0].data()
-            let col2: ChunkedArray<String> = table.columns[1].data()
-            XCTAssertEqual(col1.length, 2)
-            XCTAssertEqual(col2.length, 2)
-        case .failure(let error):
-            throw error
-        }
-    }
-}
diff --git a/swift/ArrowFlight/.gitignore b/swift/ArrowFlight/.gitignore
deleted file mode 100644
index d561187385c..00000000000
--- a/swift/ArrowFlight/.gitignore
+++ /dev/null
@@ -1,9 +0,0 @@
-.DS_Store
-/.build
-/Packages
-/*.xcodeproj
-xcuserdata/
-DerivedData/
-.swiftpm/
-.netrc
-Package.resolved
\ No newline at end of file
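testTableToRecordBatch above promotes a single batch to a table by chaining flatMap on the builder's Result; ArrowTable.from(recordBatches:) itself returns a Result, so the two steps compose:

    let tableResult = RecordBatch.Builder()
        .addColumn("col1", arrowArray: intHolder)
        .finish()
        .flatMap { rb in ArrowTable.from(recordBatches: [rb]) }
    // tableResult is .success(ArrowTable) only when both steps succeed.
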
diff --git a/swift/ArrowFlight/Package.swift b/swift/ArrowFlight/Package.swift
deleted file mode 100644
index 581ec45898b..00000000000
--- a/swift/ArrowFlight/Package.swift
+++ /dev/null
@@ -1,61 +0,0 @@
-// swift-tools-version: 5.10
-// The swift-tools-version declares the minimum version of Swift required to build this package.
-
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import PackageDescription
-
-let package = Package(
-    name: "ArrowFlight",
-    platforms: [
-        .macOS(.v10_15)
-    ],
-    products: [
-        // Products define the executables and libraries a package produces, making them visible to other packages.
-        .library(
-            name: "ArrowFlight",
-            targets: ["ArrowFlight"])
-    ],
-    dependencies: [
-        .package(url: "https://github.com/grpc/grpc-swift.git", from: "1.25.0"),
-        .package(url: "https://github.com/apple/swift-protobuf.git", from: "1.29.0"),
-        .package(path: "../Arrow")
-    ],
-    targets: [
-        // Targets are the basic building blocks of a package, defining a module or a test suite.
-        // Targets can depend on other targets in this package and products from dependencies.
-        .target(
-            name: "ArrowFlight",
-            dependencies: [
-                .product(name: "Arrow", package: "Arrow"),
-                .product(name: "GRPC", package: "grpc-swift"),
-                .product(name: "SwiftProtobuf", package: "swift-protobuf")
-            ],
-            swiftSettings: [
-                // build: .unsafeFlags(["-warnings-as-errors"])
-            ]
-        ),
-        .testTarget(
-            name: "ArrowFlightTests",
-            dependencies: ["ArrowFlight"],
-            swiftSettings: [
-                // build: .unsafeFlags(["-warnings-as-errors"])
-            ]
-        )
-    ]
-)
diff --git a/swift/ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift b/swift/ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift
deleted file mode 100644
index 8daaa19f07b..00000000000
--- a/swift/ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift
+++ /dev/null
@@ -1,1343 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-//
-// DO NOT EDIT.
-// swift-format-ignore-file
-//
-// Generated by the protocol buffer compiler.
-// Source: Flight.proto
-//
-import GRPC
-import NIO
-import NIOConcurrencyHelpers
-import SwiftProtobuf
-
-
-///
-/// A flight service is an endpoint for retrieving or storing Arrow data. A
-/// flight service can expose one or more predefined endpoints that can be
-/// accessed using the Arrow Flight Protocol. Additionally, a flight service
-/// can expose a set of actions that are available.
-///
-/// Usage: instantiate `Arrow_Flight_Protocol_FlightServiceClient`, then call methods of this protocol to make API calls.
-internal protocol Arrow_Flight_Protocol_FlightServiceClientProtocol: GRPCClient {
-  var serviceName: String { get }
-  var interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? { get }
-
-  func handshake(
-    callOptions: CallOptions?,
-    handler: @escaping (Arrow_Flight_Protocol_HandshakeResponse) -> Void
-  ) -> BidirectionalStreamingCall<Arrow_Flight_Protocol_HandshakeRequest, Arrow_Flight_Protocol_HandshakeResponse>
-
-  func listFlights(
-    _ request: Arrow_Flight_Protocol_Criteria,
-    callOptions: CallOptions?,
-    handler: @escaping (Arrow_Flight_Protocol_FlightInfo) -> Void
-  ) -> ServerStreamingCall<Arrow_Flight_Protocol_Criteria, Arrow_Flight_Protocol_FlightInfo>
-
-  func getFlightInfo(
-    _ request: Arrow_Flight_Protocol_FlightDescriptor,
-    callOptions: CallOptions?
-  ) -> UnaryCall<Arrow_Flight_Protocol_FlightDescriptor, Arrow_Flight_Protocol_FlightInfo>
-
-  func getSchema(
-    _ request: Arrow_Flight_Protocol_FlightDescriptor,
-    callOptions: CallOptions?
- ) -> UnaryCall - - func doGet( - _ request: Arrow_Flight_Protocol_Ticket, - callOptions: CallOptions?, - handler: @escaping (Arrow_Flight_Protocol_FlightData) -> Void - ) -> ServerStreamingCall - - func doPut( - callOptions: CallOptions?, - handler: @escaping (Arrow_Flight_Protocol_PutResult) -> Void - ) -> BidirectionalStreamingCall - - func doExchange( - callOptions: CallOptions?, - handler: @escaping (Arrow_Flight_Protocol_FlightData) -> Void - ) -> BidirectionalStreamingCall - - func doAction( - _ request: Arrow_Flight_Protocol_Action, - callOptions: CallOptions?, - handler: @escaping (Arrow_Flight_Protocol_Result) -> Void - ) -> ServerStreamingCall - - func listActions( - _ request: Arrow_Flight_Protocol_Empty, - callOptions: CallOptions?, - handler: @escaping (Arrow_Flight_Protocol_ActionType) -> Void - ) -> ServerStreamingCall -} - -extension Arrow_Flight_Protocol_FlightServiceClientProtocol { - internal var serviceName: String { - return "arrow.flight.protocol.FlightService" - } - - /// - /// Handshake between client and server. Depending on the server, the - /// handshake may be required to determine the token that should be used for - /// future operations. Both request and response are streams to allow multiple - /// round-trips depending on auth mechanism. - /// - /// Callers should use the `send` method on the returned object to send messages - /// to the server. The caller should send an `.end` after the final message has been sent. - /// - /// - Parameters: - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ClientStreamingCall` with futures for the metadata and status. - internal func handshake( - callOptions: CallOptions? = nil, - handler: @escaping (Arrow_Flight_Protocol_HandshakeResponse) -> Void - ) -> BidirectionalStreamingCall { - return self.makeBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.handshake.path, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeHandshakeInterceptors() ?? [], - handler: handler - ) - } - - /// - /// Get a list of available streams given a particular criteria. Most flight - /// services will expose one or more streams that are readily available for - /// retrieval. This api allows listing the streams available for - /// consumption. A user can also provide a criteria. The criteria can limit - /// the subset of streams that can be listed via this interface. Each flight - /// service allows its own definition of how to consume criteria. - /// - /// - Parameters: - /// - request: Request to send to ListFlights. - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. - internal func listFlights( - _ request: Arrow_Flight_Protocol_Criteria, - callOptions: CallOptions? = nil, - handler: @escaping (Arrow_Flight_Protocol_FlightInfo) -> Void - ) -> ServerStreamingCall { - return self.makeServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listFlights.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeListFlightsInterceptors() ?? [], - handler: handler - ) - } - - /// - /// For a given FlightDescriptor, get information about how the flight can be - /// consumed. 
This is a useful interface if the consumer of the interface - /// already can identify the specific flight to consume. This interface can - /// also allow a consumer to generate a flight stream through a specified - /// descriptor. For example, a flight descriptor might be something that - /// includes a SQL statement or a Pickled Python operation that will be - /// executed. In those cases, the descriptor will not be previously available - /// within the list of available streams provided by ListFlights but will be - /// available for consumption for the duration defined by the specific flight - /// service. - /// - /// - Parameters: - /// - request: Request to send to GetFlightInfo. - /// - callOptions: Call options. - /// - Returns: A `UnaryCall` with futures for the metadata, status and response. - internal func getFlightInfo( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? = nil - ) -> UnaryCall { - return self.makeUnaryCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getFlightInfo.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeGetFlightInfoInterceptors() ?? [] - ) - } - - /// - /// For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema - /// This is used when a consumer needs the Schema of flight stream. Similar to - /// GetFlightInfo this interface may generate a new flight that was not previously - /// available in ListFlights. - /// - /// - Parameters: - /// - request: Request to send to GetSchema. - /// - callOptions: Call options. - /// - Returns: A `UnaryCall` with futures for the metadata, status and response. - internal func getSchema( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? = nil - ) -> UnaryCall { - return self.makeUnaryCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getSchema.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeGetSchemaInterceptors() ?? [] - ) - } - - /// - /// Retrieve a single stream associated with a particular descriptor - /// associated with the referenced ticket. A Flight can be composed of one or - /// more streams where each stream can be retrieved using a separate opaque - /// ticket that the flight service uses for managing a collection of streams. - /// - /// - Parameters: - /// - request: Request to send to DoGet. - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. - internal func doGet( - _ request: Arrow_Flight_Protocol_Ticket, - callOptions: CallOptions? = nil, - handler: @escaping (Arrow_Flight_Protocol_FlightData) -> Void - ) -> ServerStreamingCall { - return self.makeServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doGet.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoGetInterceptors() ?? [], - handler: handler - ) - } - - /// - /// Push a stream to the flight service associated with a particular - /// flight stream. This allows a client of a flight service to upload a stream - /// of data. Depending on the particular flight service, a client consumer - /// could be allowed to upload a single stream per descriptor or an unlimited - /// number. 
In the latter, the service might implement a 'seal' action that - /// can be applied to a descriptor once all streams are uploaded. - /// - /// Callers should use the `send` method on the returned object to send messages - /// to the server. The caller should send an `.end` after the final message has been sent. - /// - /// - Parameters: - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ClientStreamingCall` with futures for the metadata and status. - internal func doPut( - callOptions: CallOptions? = nil, - handler: @escaping (Arrow_Flight_Protocol_PutResult) -> Void - ) -> BidirectionalStreamingCall { - return self.makeBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doPut.path, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoPutInterceptors() ?? [], - handler: handler - ) - } - - /// - /// Open a bidirectional data channel for a given descriptor. This - /// allows clients to send and receive arbitrary Arrow data and - /// application-specific metadata in a single logical stream. In - /// contrast to DoGet/DoPut, this is more suited for clients - /// offloading computation (rather than storage) to a Flight service. - /// - /// Callers should use the `send` method on the returned object to send messages - /// to the server. The caller should send an `.end` after the final message has been sent. - /// - /// - Parameters: - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ClientStreamingCall` with futures for the metadata and status. - internal func doExchange( - callOptions: CallOptions? = nil, - handler: @escaping (Arrow_Flight_Protocol_FlightData) -> Void - ) -> BidirectionalStreamingCall { - return self.makeBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doExchange.path, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoExchangeInterceptors() ?? [], - handler: handler - ) - } - - /// - /// Flight services can support an arbitrary number of simple actions in - /// addition to the possible ListFlights, GetFlightInfo, DoGet, DoPut - /// operations that are potentially available. DoAction allows a flight client - /// to do a specific action against a flight service. An action includes - /// opaque request and response objects that are specific to the type action - /// being undertaken. - /// - /// - Parameters: - /// - request: Request to send to DoAction. - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. - internal func doAction( - _ request: Arrow_Flight_Protocol_Action, - callOptions: CallOptions? = nil, - handler: @escaping (Arrow_Flight_Protocol_Result) -> Void - ) -> ServerStreamingCall { - return self.makeServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doAction.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoActionInterceptors() ?? [], - handler: handler - ) - } - - /// - /// A flight service exposes all of the available action types that it has - /// along with descriptions. This allows different flight consumers to - /// understand the capabilities of the flight service. 
- /// - /// - Parameters: - /// - request: Request to send to ListActions. - /// - callOptions: Call options. - /// - handler: A closure called when each response is received from the server. - /// - Returns: A `ServerStreamingCall` with futures for the metadata and status. - internal func listActions( - _ request: Arrow_Flight_Protocol_Empty, - callOptions: CallOptions? = nil, - handler: @escaping (Arrow_Flight_Protocol_ActionType) -> Void - ) -> ServerStreamingCall { - return self.makeServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listActions.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeListActionsInterceptors() ?? [], - handler: handler - ) - } -} - -@available(*, deprecated) -extension Arrow_Flight_Protocol_FlightServiceClient: @unchecked Sendable {} - -@available(*, deprecated, renamed: "Arrow_Flight_Protocol_FlightServiceNIOClient") -internal final class Arrow_Flight_Protocol_FlightServiceClient: Arrow_Flight_Protocol_FlightServiceClientProtocol { - private let lock = Lock() - private var _defaultCallOptions: CallOptions - private var _interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? - internal let channel: GRPCChannel - internal var defaultCallOptions: CallOptions { - get { self.lock.withLock { return self._defaultCallOptions } } - set { self.lock.withLockVoid { self._defaultCallOptions = newValue } } - } - internal var interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? { - get { self.lock.withLock { return self._interceptors } } - set { self.lock.withLockVoid { self._interceptors = newValue } } - } - - /// Creates a client for the arrow.flight.protocol.FlightService service. - /// - /// - Parameters: - /// - channel: `GRPCChannel` to the service host. - /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them. - /// - interceptors: A factory providing interceptors for each RPC. - internal init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self._defaultCallOptions = defaultCallOptions - self._interceptors = interceptors - } -} - -internal struct Arrow_Flight_Protocol_FlightServiceNIOClient: Arrow_Flight_Protocol_FlightServiceClientProtocol { - internal var channel: GRPCChannel - internal var defaultCallOptions: CallOptions - internal var interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? - - /// Creates a client for the arrow.flight.protocol.FlightService service. - /// - /// - Parameters: - /// - channel: `GRPCChannel` to the service host. - /// - defaultCallOptions: Options to use for each service call if the user doesn't provide them. - /// - interceptors: A factory providing interceptors for each RPC. - internal init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self.defaultCallOptions = defaultCallOptions - self.interceptors = interceptors - } -} - -/// -/// A flight service is an endpoint for retrieving or storing Arrow data. A -/// flight service can expose one or more predefined endpoints that can be -/// accessed using the Arrow Flight Protocol. 
Additionally, a flight service -/// can expose a set of actions that are available. -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -internal protocol Arrow_Flight_Protocol_FlightServiceAsyncClientProtocol: GRPCClient { - static var serviceDescriptor: GRPCServiceDescriptor { get } - var interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? { get } - - func makeHandshakeCall( - callOptions: CallOptions? - ) -> GRPCAsyncBidirectionalStreamingCall - - func makeListFlightsCall( - _ request: Arrow_Flight_Protocol_Criteria, - callOptions: CallOptions? - ) -> GRPCAsyncServerStreamingCall - - func makeGetFlightInfoCall( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? - ) -> GRPCAsyncUnaryCall - - func makeGetSchemaCall( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? - ) -> GRPCAsyncUnaryCall - - func makeDoGetCall( - _ request: Arrow_Flight_Protocol_Ticket, - callOptions: CallOptions? - ) -> GRPCAsyncServerStreamingCall - - func makeDoPutCall( - callOptions: CallOptions? - ) -> GRPCAsyncBidirectionalStreamingCall - - func makeDoExchangeCall( - callOptions: CallOptions? - ) -> GRPCAsyncBidirectionalStreamingCall - - func makeDoActionCall( - _ request: Arrow_Flight_Protocol_Action, - callOptions: CallOptions? - ) -> GRPCAsyncServerStreamingCall - - func makeListActionsCall( - _ request: Arrow_Flight_Protocol_Empty, - callOptions: CallOptions? - ) -> GRPCAsyncServerStreamingCall -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -extension Arrow_Flight_Protocol_FlightServiceAsyncClientProtocol { - internal static var serviceDescriptor: GRPCServiceDescriptor { - return Arrow_Flight_Protocol_FlightServiceClientMetadata.serviceDescriptor - } - - internal var interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? { - return nil - } - - internal func makeHandshakeCall( - callOptions: CallOptions? = nil - ) -> GRPCAsyncBidirectionalStreamingCall { - return self.makeAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.handshake.path, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeHandshakeInterceptors() ?? [] - ) - } - - internal func makeListFlightsCall( - _ request: Arrow_Flight_Protocol_Criteria, - callOptions: CallOptions? = nil - ) -> GRPCAsyncServerStreamingCall { - return self.makeAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listFlights.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeListFlightsInterceptors() ?? [] - ) - } - - internal func makeGetFlightInfoCall( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? = nil - ) -> GRPCAsyncUnaryCall { - return self.makeAsyncUnaryCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getFlightInfo.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeGetFlightInfoInterceptors() ?? [] - ) - } - - internal func makeGetSchemaCall( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? = nil - ) -> GRPCAsyncUnaryCall { - return self.makeAsyncUnaryCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getSchema.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeGetSchemaInterceptors() ?? 
[] - ) - } - - internal func makeDoGetCall( - _ request: Arrow_Flight_Protocol_Ticket, - callOptions: CallOptions? = nil - ) -> GRPCAsyncServerStreamingCall { - return self.makeAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doGet.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoGetInterceptors() ?? [] - ) - } - - internal func makeDoPutCall( - callOptions: CallOptions? = nil - ) -> GRPCAsyncBidirectionalStreamingCall { - return self.makeAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doPut.path, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoPutInterceptors() ?? [] - ) - } - - internal func makeDoExchangeCall( - callOptions: CallOptions? = nil - ) -> GRPCAsyncBidirectionalStreamingCall { - return self.makeAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doExchange.path, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoExchangeInterceptors() ?? [] - ) - } - - internal func makeDoActionCall( - _ request: Arrow_Flight_Protocol_Action, - callOptions: CallOptions? = nil - ) -> GRPCAsyncServerStreamingCall { - return self.makeAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doAction.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoActionInterceptors() ?? [] - ) - } - - internal func makeListActionsCall( - _ request: Arrow_Flight_Protocol_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncServerStreamingCall { - return self.makeAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listActions.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeListActionsInterceptors() ?? [] - ) - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -extension Arrow_Flight_Protocol_FlightServiceAsyncClientProtocol { - internal func handshake( - _ requests: RequestStream, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream where RequestStream: Sequence, RequestStream.Element == Arrow_Flight_Protocol_HandshakeRequest { - return self.performAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.handshake.path, - requests: requests, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeHandshakeInterceptors() ?? [] - ) - } - - internal func handshake( - _ requests: RequestStream, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream where RequestStream: AsyncSequence & Sendable, RequestStream.Element == Arrow_Flight_Protocol_HandshakeRequest { - return self.performAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.handshake.path, - requests: requests, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeHandshakeInterceptors() ?? [] - ) - } - - internal func listFlights( - _ request: Arrow_Flight_Protocol_Criteria, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream { - return self.performAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listFlights.path, - request: request, - callOptions: callOptions ?? 
self.defaultCallOptions, - interceptors: self.interceptors?.makeListFlightsInterceptors() ?? [] - ) - } - - internal func getFlightInfo( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? = nil - ) async throws -> Arrow_Flight_Protocol_FlightInfo { - return try await self.performAsyncUnaryCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getFlightInfo.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeGetFlightInfoInterceptors() ?? [] - ) - } - - internal func getSchema( - _ request: Arrow_Flight_Protocol_FlightDescriptor, - callOptions: CallOptions? = nil - ) async throws -> Arrow_Flight_Protocol_SchemaResult { - return try await self.performAsyncUnaryCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getSchema.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeGetSchemaInterceptors() ?? [] - ) - } - - internal func doGet( - _ request: Arrow_Flight_Protocol_Ticket, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream { - return self.performAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doGet.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoGetInterceptors() ?? [] - ) - } - - internal func doPut( - _ requests: RequestStream, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream where RequestStream: Sequence, RequestStream.Element == Arrow_Flight_Protocol_FlightData { - return self.performAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doPut.path, - requests: requests, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoPutInterceptors() ?? [] - ) - } - - internal func doPut( - _ requests: RequestStream, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream where RequestStream: AsyncSequence & Sendable, RequestStream.Element == Arrow_Flight_Protocol_FlightData { - return self.performAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doPut.path, - requests: requests, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoPutInterceptors() ?? [] - ) - } - - internal func doExchange( - _ requests: RequestStream, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream where RequestStream: Sequence, RequestStream.Element == Arrow_Flight_Protocol_FlightData { - return self.performAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doExchange.path, - requests: requests, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoExchangeInterceptors() ?? [] - ) - } - - internal func doExchange( - _ requests: RequestStream, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream where RequestStream: AsyncSequence & Sendable, RequestStream.Element == Arrow_Flight_Protocol_FlightData { - return self.performAsyncBidirectionalStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doExchange.path, - requests: requests, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoExchangeInterceptors() ?? 
[] - ) - } - - internal func doAction( - _ request: Arrow_Flight_Protocol_Action, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream { - return self.performAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doAction.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeDoActionInterceptors() ?? [] - ) - } - - internal func listActions( - _ request: Arrow_Flight_Protocol_Empty, - callOptions: CallOptions? = nil - ) -> GRPCAsyncResponseStream { - return self.performAsyncServerStreamingCall( - path: Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listActions.path, - request: request, - callOptions: callOptions ?? self.defaultCallOptions, - interceptors: self.interceptors?.makeListActionsInterceptors() ?? [] - ) - } -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -internal struct Arrow_Flight_Protocol_FlightServiceAsyncClient: Arrow_Flight_Protocol_FlightServiceAsyncClientProtocol { - internal var channel: GRPCChannel - internal var defaultCallOptions: CallOptions - internal var interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? - - internal init( - channel: GRPCChannel, - defaultCallOptions: CallOptions = CallOptions(), - interceptors: Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol? = nil - ) { - self.channel = channel - self.defaultCallOptions = defaultCallOptions - self.interceptors = interceptors - } -} - -internal protocol Arrow_Flight_Protocol_FlightServiceClientInterceptorFactoryProtocol: Sendable { - - /// - Returns: Interceptors to use when invoking 'handshake'. - func makeHandshakeInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'listFlights'. - func makeListFlightsInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'getFlightInfo'. - func makeGetFlightInfoInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'getSchema'. - func makeGetSchemaInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'doGet'. - func makeDoGetInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'doPut'. - func makeDoPutInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'doExchange'. - func makeDoExchangeInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'doAction'. - func makeDoActionInterceptors() -> [ClientInterceptor] - - /// - Returns: Interceptors to use when invoking 'listActions'. 
- func makeListActionsInterceptors() -> [ClientInterceptor] -} - -internal enum Arrow_Flight_Protocol_FlightServiceClientMetadata { - internal static let serviceDescriptor = GRPCServiceDescriptor( - name: "FlightService", - fullName: "arrow.flight.protocol.FlightService", - methods: [ - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.handshake, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listFlights, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getFlightInfo, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.getSchema, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doGet, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doPut, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doExchange, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.doAction, - Arrow_Flight_Protocol_FlightServiceClientMetadata.Methods.listActions, - ] - ) - - internal enum Methods { - internal static let handshake = GRPCMethodDescriptor( - name: "Handshake", - path: "/arrow.flight.protocol.FlightService/Handshake", - type: GRPCCallType.bidirectionalStreaming - ) - - internal static let listFlights = GRPCMethodDescriptor( - name: "ListFlights", - path: "/arrow.flight.protocol.FlightService/ListFlights", - type: GRPCCallType.serverStreaming - ) - - internal static let getFlightInfo = GRPCMethodDescriptor( - name: "GetFlightInfo", - path: "/arrow.flight.protocol.FlightService/GetFlightInfo", - type: GRPCCallType.unary - ) - - internal static let getSchema = GRPCMethodDescriptor( - name: "GetSchema", - path: "/arrow.flight.protocol.FlightService/GetSchema", - type: GRPCCallType.unary - ) - - internal static let doGet = GRPCMethodDescriptor( - name: "DoGet", - path: "/arrow.flight.protocol.FlightService/DoGet", - type: GRPCCallType.serverStreaming - ) - - internal static let doPut = GRPCMethodDescriptor( - name: "DoPut", - path: "/arrow.flight.protocol.FlightService/DoPut", - type: GRPCCallType.bidirectionalStreaming - ) - - internal static let doExchange = GRPCMethodDescriptor( - name: "DoExchange", - path: "/arrow.flight.protocol.FlightService/DoExchange", - type: GRPCCallType.bidirectionalStreaming - ) - - internal static let doAction = GRPCMethodDescriptor( - name: "DoAction", - path: "/arrow.flight.protocol.FlightService/DoAction", - type: GRPCCallType.serverStreaming - ) - - internal static let listActions = GRPCMethodDescriptor( - name: "ListActions", - path: "/arrow.flight.protocol.FlightService/ListActions", - type: GRPCCallType.serverStreaming - ) - } -} - -/// -/// A flight service is an endpoint for retrieving or storing Arrow data. A -/// flight service can expose one or more predefined endpoints that can be -/// accessed using the Arrow Flight Protocol. Additionally, a flight service -/// can expose a set of actions that are available. -/// -/// To build a server, implement a class that conforms to this protocol. -internal protocol Arrow_Flight_Protocol_FlightServiceProvider: CallHandlerProvider { - var interceptors: Arrow_Flight_Protocol_FlightServiceServerInterceptorFactoryProtocol? { get } - - /// - /// Handshake between client and server. Depending on the server, the - /// handshake may be required to determine the token that should be used for - /// future operations. Both request and response are streams to allow multiple - /// round-trips depending on auth mechanism. 
- func handshake(context: StreamingResponseCallContext) -> EventLoopFuture<(StreamEvent) -> Void> - - /// - /// Get a list of available streams given a particular criteria. Most flight - /// services will expose one or more streams that are readily available for - /// retrieval. This api allows listing the streams available for - /// consumption. A user can also provide a criteria. The criteria can limit - /// the subset of streams that can be listed via this interface. Each flight - /// service allows its own definition of how to consume criteria. - func listFlights(request: Arrow_Flight_Protocol_Criteria, context: StreamingResponseCallContext) -> EventLoopFuture - - /// - /// For a given FlightDescriptor, get information about how the flight can be - /// consumed. This is a useful interface if the consumer of the interface - /// already can identify the specific flight to consume. This interface can - /// also allow a consumer to generate a flight stream through a specified - /// descriptor. For example, a flight descriptor might be something that - /// includes a SQL statement or a Pickled Python operation that will be - /// executed. In those cases, the descriptor will not be previously available - /// within the list of available streams provided by ListFlights but will be - /// available for consumption for the duration defined by the specific flight - /// service. - func getFlightInfo(request: Arrow_Flight_Protocol_FlightDescriptor, context: StatusOnlyCallContext) -> EventLoopFuture - - /// - /// For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema - /// This is used when a consumer needs the Schema of flight stream. Similar to - /// GetFlightInfo this interface may generate a new flight that was not previously - /// available in ListFlights. - func getSchema(request: Arrow_Flight_Protocol_FlightDescriptor, context: StatusOnlyCallContext) -> EventLoopFuture - - /// - /// Retrieve a single stream associated with a particular descriptor - /// associated with the referenced ticket. A Flight can be composed of one or - /// more streams where each stream can be retrieved using a separate opaque - /// ticket that the flight service uses for managing a collection of streams. - func doGet(request: Arrow_Flight_Protocol_Ticket, context: StreamingResponseCallContext) -> EventLoopFuture - - /// - /// Push a stream to the flight service associated with a particular - /// flight stream. This allows a client of a flight service to upload a stream - /// of data. Depending on the particular flight service, a client consumer - /// could be allowed to upload a single stream per descriptor or an unlimited - /// number. In the latter, the service might implement a 'seal' action that - /// can be applied to a descriptor once all streams are uploaded. - func doPut(context: StreamingResponseCallContext) -> EventLoopFuture<(StreamEvent) -> Void> - - /// - /// Open a bidirectional data channel for a given descriptor. This - /// allows clients to send and receive arbitrary Arrow data and - /// application-specific metadata in a single logical stream. In - /// contrast to DoGet/DoPut, this is more suited for clients - /// offloading computation (rather than storage) to a Flight service. 
- func doExchange(context: StreamingResponseCallContext) -> EventLoopFuture<(StreamEvent) -> Void> - - /// - /// Flight services can support an arbitrary number of simple actions in - /// addition to the possible ListFlights, GetFlightInfo, DoGet, DoPut - /// operations that are potentially available. DoAction allows a flight client - /// to do a specific action against a flight service. An action includes - /// opaque request and response objects that are specific to the type action - /// being undertaken. - func doAction(request: Arrow_Flight_Protocol_Action, context: StreamingResponseCallContext) -> EventLoopFuture - - /// - /// A flight service exposes all of the available action types that it has - /// along with descriptions. This allows different flight consumers to - /// understand the capabilities of the flight service. - func listActions(request: Arrow_Flight_Protocol_Empty, context: StreamingResponseCallContext) -> EventLoopFuture -} - -extension Arrow_Flight_Protocol_FlightServiceProvider { - internal var serviceName: Substring { - return Arrow_Flight_Protocol_FlightServiceServerMetadata.serviceDescriptor.fullName[...] - } - - /// Determines, calls and returns the appropriate request handler, depending on the request's method. - /// Returns nil for methods not handled by this service. - internal func handle( - method name: Substring, - context: CallHandlerContext - ) -> GRPCServerHandlerProtocol? { - switch name { - case "Handshake": - return BidirectionalStreamingServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeHandshakeInterceptors() ?? [], - observerFactory: self.handshake(context:) - ) - - case "ListFlights": - return ServerStreamingServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeListFlightsInterceptors() ?? [], - userFunction: self.listFlights(request:context:) - ) - - case "GetFlightInfo": - return UnaryServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeGetFlightInfoInterceptors() ?? [], - userFunction: self.getFlightInfo(request:context:) - ) - - case "GetSchema": - return UnaryServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeGetSchemaInterceptors() ?? [], - userFunction: self.getSchema(request:context:) - ) - - case "DoGet": - return ServerStreamingServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoGetInterceptors() ?? [], - userFunction: self.doGet(request:context:) - ) - - case "DoPut": - return BidirectionalStreamingServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoPutInterceptors() ?? [], - observerFactory: self.doPut(context:) - ) - - case "DoExchange": - return BidirectionalStreamingServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoExchangeInterceptors() ?? 
[], - observerFactory: self.doExchange(context:) - ) - - case "DoAction": - return ServerStreamingServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoActionInterceptors() ?? [], - userFunction: self.doAction(request:context:) - ) - - case "ListActions": - return ServerStreamingServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeListActionsInterceptors() ?? [], - userFunction: self.listActions(request:context:) - ) - - default: - return nil - } - } -} - -/// -/// A flight service is an endpoint for retrieving or storing Arrow data. A -/// flight service can expose one or more predefined endpoints that can be -/// accessed using the Arrow Flight Protocol. Additionally, a flight service -/// can expose a set of actions that are available. -/// -/// To implement a server, implement an object which conforms to this protocol. -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -internal protocol Arrow_Flight_Protocol_FlightServiceAsyncProvider: CallHandlerProvider, Sendable { - static var serviceDescriptor: GRPCServiceDescriptor { get } - var interceptors: Arrow_Flight_Protocol_FlightServiceServerInterceptorFactoryProtocol? { get } - - /// - /// Handshake between client and server. Depending on the server, the - /// handshake may be required to determine the token that should be used for - /// future operations. Both request and response are streams to allow multiple - /// round-trips depending on auth mechanism. - func handshake( - requestStream: GRPCAsyncRequestStream, - responseStream: GRPCAsyncResponseStreamWriter, - context: GRPCAsyncServerCallContext - ) async throws - - /// - /// Get a list of available streams given a particular criteria. Most flight - /// services will expose one or more streams that are readily available for - /// retrieval. This api allows listing the streams available for - /// consumption. A user can also provide a criteria. The criteria can limit - /// the subset of streams that can be listed via this interface. Each flight - /// service allows its own definition of how to consume criteria. - func listFlights( - request: Arrow_Flight_Protocol_Criteria, - responseStream: GRPCAsyncResponseStreamWriter, - context: GRPCAsyncServerCallContext - ) async throws - - /// - /// For a given FlightDescriptor, get information about how the flight can be - /// consumed. This is a useful interface if the consumer of the interface - /// already can identify the specific flight to consume. This interface can - /// also allow a consumer to generate a flight stream through a specified - /// descriptor. For example, a flight descriptor might be something that - /// includes a SQL statement or a Pickled Python operation that will be - /// executed. In those cases, the descriptor will not be previously available - /// within the list of available streams provided by ListFlights but will be - /// available for consumption for the duration defined by the specific flight - /// service. - func getFlightInfo( - request: Arrow_Flight_Protocol_FlightDescriptor, - context: GRPCAsyncServerCallContext - ) async throws -> Arrow_Flight_Protocol_FlightInfo - - /// - /// For a given FlightDescriptor, get the Schema as described in Schema.fbs::Schema - /// This is used when a consumer needs the Schema of flight stream. 
Similar to - /// GetFlightInfo this interface may generate a new flight that was not previously - /// available in ListFlights. - func getSchema( - request: Arrow_Flight_Protocol_FlightDescriptor, - context: GRPCAsyncServerCallContext - ) async throws -> Arrow_Flight_Protocol_SchemaResult - - /// - /// Retrieve a single stream associated with a particular descriptor - /// associated with the referenced ticket. A Flight can be composed of one or - /// more streams where each stream can be retrieved using a separate opaque - /// ticket that the flight service uses for managing a collection of streams. - func doGet( - request: Arrow_Flight_Protocol_Ticket, - responseStream: GRPCAsyncResponseStreamWriter, - context: GRPCAsyncServerCallContext - ) async throws - - /// - /// Push a stream to the flight service associated with a particular - /// flight stream. This allows a client of a flight service to upload a stream - /// of data. Depending on the particular flight service, a client consumer - /// could be allowed to upload a single stream per descriptor or an unlimited - /// number. In the latter, the service might implement a 'seal' action that - /// can be applied to a descriptor once all streams are uploaded. - func doPut( - requestStream: GRPCAsyncRequestStream, - responseStream: GRPCAsyncResponseStreamWriter, - context: GRPCAsyncServerCallContext - ) async throws - - /// - /// Open a bidirectional data channel for a given descriptor. This - /// allows clients to send and receive arbitrary Arrow data and - /// application-specific metadata in a single logical stream. In - /// contrast to DoGet/DoPut, this is more suited for clients - /// offloading computation (rather than storage) to a Flight service. - func doExchange( - requestStream: GRPCAsyncRequestStream, - responseStream: GRPCAsyncResponseStreamWriter, - context: GRPCAsyncServerCallContext - ) async throws - - /// - /// Flight services can support an arbitrary number of simple actions in - /// addition to the possible ListFlights, GetFlightInfo, DoGet, DoPut - /// operations that are potentially available. DoAction allows a flight client - /// to do a specific action against a flight service. An action includes - /// opaque request and response objects that are specific to the type action - /// being undertaken. - func doAction( - request: Arrow_Flight_Protocol_Action, - responseStream: GRPCAsyncResponseStreamWriter, - context: GRPCAsyncServerCallContext - ) async throws - - /// - /// A flight service exposes all of the available action types that it has - /// along with descriptions. This allows different flight consumers to - /// understand the capabilities of the flight service. - func listActions( - request: Arrow_Flight_Protocol_Empty, - responseStream: GRPCAsyncResponseStreamWriter, - context: GRPCAsyncServerCallContext - ) async throws -} - -@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *) -extension Arrow_Flight_Protocol_FlightServiceAsyncProvider { - internal static var serviceDescriptor: GRPCServiceDescriptor { - return Arrow_Flight_Protocol_FlightServiceServerMetadata.serviceDescriptor - } - - internal var serviceName: Substring { - return Arrow_Flight_Protocol_FlightServiceServerMetadata.serviceDescriptor.fullName[...] - } - - internal var interceptors: Arrow_Flight_Protocol_FlightServiceServerInterceptorFactoryProtocol? { - return nil - } - - internal func handle( - method name: Substring, - context: CallHandlerContext - ) -> GRPCServerHandlerProtocol? 
{ - switch name { - case "Handshake": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeHandshakeInterceptors() ?? [], - wrapping: { try await self.handshake(requestStream: $0, responseStream: $1, context: $2) } - ) - - case "ListFlights": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeListFlightsInterceptors() ?? [], - wrapping: { try await self.listFlights(request: $0, responseStream: $1, context: $2) } - ) - - case "GetFlightInfo": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeGetFlightInfoInterceptors() ?? [], - wrapping: { try await self.getFlightInfo(request: $0, context: $1) } - ) - - case "GetSchema": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeGetSchemaInterceptors() ?? [], - wrapping: { try await self.getSchema(request: $0, context: $1) } - ) - - case "DoGet": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoGetInterceptors() ?? [], - wrapping: { try await self.doGet(request: $0, responseStream: $1, context: $2) } - ) - - case "DoPut": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoPutInterceptors() ?? [], - wrapping: { try await self.doPut(requestStream: $0, responseStream: $1, context: $2) } - ) - - case "DoExchange": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoExchangeInterceptors() ?? [], - wrapping: { try await self.doExchange(requestStream: $0, responseStream: $1, context: $2) } - ) - - case "DoAction": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeDoActionInterceptors() ?? [], - wrapping: { try await self.doAction(request: $0, responseStream: $1, context: $2) } - ) - - case "ListActions": - return GRPCAsyncServerHandler( - context: context, - requestDeserializer: ProtobufDeserializer(), - responseSerializer: ProtobufSerializer(), - interceptors: self.interceptors?.makeListActionsInterceptors() ?? [], - wrapping: { try await self.listActions(request: $0, responseStream: $1, context: $2) } - ) - - default: - return nil - } - } -} - -internal protocol Arrow_Flight_Protocol_FlightServiceServerInterceptorFactoryProtocol: Sendable { - - /// - Returns: Interceptors to use when handling 'handshake'. - /// Defaults to calling `self.makeInterceptors()`. - func makeHandshakeInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'listFlights'. - /// Defaults to calling `self.makeInterceptors()`. - func makeListFlightsInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'getFlightInfo'. 
- /// Defaults to calling `self.makeInterceptors()`. - func makeGetFlightInfoInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'getSchema'. - /// Defaults to calling `self.makeInterceptors()`. - func makeGetSchemaInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'doGet'. - /// Defaults to calling `self.makeInterceptors()`. - func makeDoGetInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'doPut'. - /// Defaults to calling `self.makeInterceptors()`. - func makeDoPutInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'doExchange'. - /// Defaults to calling `self.makeInterceptors()`. - func makeDoExchangeInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'doAction'. - /// Defaults to calling `self.makeInterceptors()`. - func makeDoActionInterceptors() -> [ServerInterceptor] - - /// - Returns: Interceptors to use when handling 'listActions'. - /// Defaults to calling `self.makeInterceptors()`. - func makeListActionsInterceptors() -> [ServerInterceptor] -} - -internal enum Arrow_Flight_Protocol_FlightServiceServerMetadata { - internal static let serviceDescriptor = GRPCServiceDescriptor( - name: "FlightService", - fullName: "arrow.flight.protocol.FlightService", - methods: [ - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.handshake, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.listFlights, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.getFlightInfo, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.getSchema, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.doGet, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.doPut, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.doExchange, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.doAction, - Arrow_Flight_Protocol_FlightServiceServerMetadata.Methods.listActions, - ] - ) - - internal enum Methods { - internal static let handshake = GRPCMethodDescriptor( - name: "Handshake", - path: "/arrow.flight.protocol.FlightService/Handshake", - type: GRPCCallType.bidirectionalStreaming - ) - - internal static let listFlights = GRPCMethodDescriptor( - name: "ListFlights", - path: "/arrow.flight.protocol.FlightService/ListFlights", - type: GRPCCallType.serverStreaming - ) - - internal static let getFlightInfo = GRPCMethodDescriptor( - name: "GetFlightInfo", - path: "/arrow.flight.protocol.FlightService/GetFlightInfo", - type: GRPCCallType.unary - ) - - internal static let getSchema = GRPCMethodDescriptor( - name: "GetSchema", - path: "/arrow.flight.protocol.FlightService/GetSchema", - type: GRPCCallType.unary - ) - - internal static let doGet = GRPCMethodDescriptor( - name: "DoGet", - path: "/arrow.flight.protocol.FlightService/DoGet", - type: GRPCCallType.serverStreaming - ) - - internal static let doPut = GRPCMethodDescriptor( - name: "DoPut", - path: "/arrow.flight.protocol.FlightService/DoPut", - type: GRPCCallType.bidirectionalStreaming - ) - - internal static let doExchange = GRPCMethodDescriptor( - name: "DoExchange", - path: "/arrow.flight.protocol.FlightService/DoExchange", - type: GRPCCallType.bidirectionalStreaming - ) - - internal static let doAction = GRPCMethodDescriptor( - name: "DoAction", - path: "/arrow.flight.protocol.FlightService/DoAction", - type: GRPCCallType.serverStreaming - ) - - internal static let listActions = GRPCMethodDescriptor( - name: 
"ListActions", - path: "/arrow.flight.protocol.FlightService/ListActions", - type: GRPCCallType.serverStreaming - ) - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/Flight.pb.swift b/swift/ArrowFlight/Sources/ArrowFlight/Flight.pb.swift deleted file mode 100644 index b50d4062529..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/Flight.pb.swift +++ /dev/null @@ -1,1366 +0,0 @@ -// DO NOT EDIT. -// swift-format-ignore-file -// -// Generated by the Swift generator plugin for the protocol buffer compiler. -// Source: Flight.proto -// -// For information on using the generated types, please see the documentation: -// https://github.com/apple/swift-protobuf/ - -// -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -//
-// http://www.apache.org/licenses/LICENSE-2.0 -//
-// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -import Foundation -import SwiftProtobuf - -// If the compiler emits an error on this type, it is because this file -// was generated by a version of the `protoc` Swift plug-in that is -// incompatible with the version of SwiftProtobuf to which you are linking. -// Please ensure that you are building against the same version of the API -// that was used to generate this file. -fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck { - struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {} - typealias Version = _2 -} - -/// -/// The result of a cancel operation. -/// -/// This is used by CancelFlightInfoResult.status. -enum Arrow_Flight_Protocol_CancelStatus: SwiftProtobuf.Enum { - typealias RawValue = Int - - /// The cancellation status is unknown. Servers should avoid using - /// this value (send a NOT_FOUND error if the requested query is - /// not known). Clients can retry the request. - case unspecified // = 0 - - /// The cancellation request is complete. Subsequent requests with - /// the same payload may return CANCELLED or a NOT_FOUND error. - case cancelled // = 1 - - /// The cancellation request is in progress. The client may retry - /// the cancellation request. - case cancelling // = 2 - - /// The query is not cancellable. The client should not retry the - /// cancellation request. - case notCancellable // = 3 - case UNRECOGNIZED(Int) - - init() { - self = .unspecified - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .unspecified - case 1: self = .cancelled - case 2: self = .cancelling - case 3: self = .notCancellable - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .unspecified: return 0 - case .cancelled: return 1 - case .cancelling: return 2 - case .notCancellable: return 3 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_CancelStatus: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_CancelStatus] = [ - .unspecified, - .cancelled, - .cancelling, - .notCancellable, - ] -} - -#endif // swift(>=4.2) - -/// -/// The request that a client provides to a server on handshake. -struct Arrow_Flight_Protocol_HandshakeRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// A defined protocol version - var protocolVersion: UInt64 = 0 - - /// - /// Arbitrary auth/handshake info. - var payload: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -struct Arrow_Flight_Protocol_HandshakeResponse { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// A defined protocol version - var protocolVersion: UInt64 = 0 - - /// - /// Arbitrary auth/handshake info. 
- var payload: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// A message for doing simple auth. -struct Arrow_Flight_Protocol_BasicAuth { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var username: String = String() - - var password: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -struct Arrow_Flight_Protocol_Empty { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Describes an available action, including both the name used for execution -/// along with a short description of the purpose of the action. -struct Arrow_Flight_Protocol_ActionType { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var type: String = String() - - var description_p: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// A service specific expression that can be used to return a limited set -/// of available Arrow Flight streams. -struct Arrow_Flight_Protocol_Criteria { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var expression: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// An opaque action specific for the service. -struct Arrow_Flight_Protocol_Action { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var type: String = String() - - var body: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// The request of the CancelFlightInfo action. -/// -/// The request should be stored in Action.body. -struct Arrow_Flight_Protocol_CancelFlightInfoRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var info: Arrow_Flight_Protocol_FlightInfo { - get {return _info ?? Arrow_Flight_Protocol_FlightInfo()} - set {_info = newValue} - } - /// Returns true if `info` has been explicitly set. - var hasInfo: Bool {return self._info != nil} - /// Clears the value of `info`. Subsequent reads from it will return its default value. - mutating func clearInfo() {self._info = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _info: Arrow_Flight_Protocol_FlightInfo? = nil -} - -/// -/// The request of the RenewFlightEndpoint action. -/// -/// The request should be stored in Action.body. -struct Arrow_Flight_Protocol_RenewFlightEndpointRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. 
- - var endpoint: Arrow_Flight_Protocol_FlightEndpoint { - get {return _endpoint ?? Arrow_Flight_Protocol_FlightEndpoint()} - set {_endpoint = newValue} - } - /// Returns true if `endpoint` has been explicitly set. - var hasEndpoint: Bool {return self._endpoint != nil} - /// Clears the value of `endpoint`. Subsequent reads from it will return its default value. - mutating func clearEndpoint() {self._endpoint = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _endpoint: Arrow_Flight_Protocol_FlightEndpoint? = nil -} - -/// -/// An opaque result returned after executing an action. -struct Arrow_Flight_Protocol_Result { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var body: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// The result of the CancelFlightInfo action. -/// -/// The result should be stored in Result.body. -struct Arrow_Flight_Protocol_CancelFlightInfoResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var status: Arrow_Flight_Protocol_CancelStatus = .unspecified - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Wrap the result of a getSchema call -struct Arrow_Flight_Protocol_SchemaResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The schema of the dataset in its IPC form: - /// 4 bytes - an optional IPC_CONTINUATION_TOKEN prefix - /// 4 bytes - the byte length of the payload - /// a flatbuffer Message whose header is the Schema - var schema: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// The name or tag for a Flight. May be used as a way to retrieve or generate -/// a flight or be used to expose a set of previously defined flights. -struct Arrow_Flight_Protocol_FlightDescriptor { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var type: Arrow_Flight_Protocol_FlightDescriptor.DescriptorType = .unknown - - /// - /// Opaque value used to express a command. Should only be defined when - /// type = CMD. - var cmd: Data = Data() - - /// - /// List of strings identifying a particular dataset. Should only be defined - /// when type = PATH. - var path: [String] = [] - - var unknownFields = SwiftProtobuf.UnknownStorage() - - /// - /// Describes what type of descriptor is defined. - enum DescriptorType: SwiftProtobuf.Enum { - typealias RawValue = Int - - /// Protobuf pattern, not used. - case unknown // = 0 - - /// - /// A named path that identifies a dataset. A path is composed of a string - /// or list of strings describing a particular dataset. This is conceptually - /// similar to a path inside a filesystem. - case path // = 1 - - /// - /// An opaque command to generate a dataset. 
- case cmd // = 2 - case UNRECOGNIZED(Int) - - init() { - self = .unknown - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .unknown - case 1: self = .path - case 2: self = .cmd - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .unknown: return 0 - case .path: return 1 - case .cmd: return 2 - case .UNRECOGNIZED(let i): return i - } - } - - } - - init() {} -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_FlightDescriptor.DescriptorType: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_FlightDescriptor.DescriptorType] = [ - .unknown, - .path, - .cmd, - ] -} - -#endif // swift(>=4.2) - -/// -/// The access coordinates for retrieval of a dataset. With a FlightInfo, a -/// consumer is able to determine how to retrieve a dataset. -struct Arrow_Flight_Protocol_FlightInfo { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The schema of the dataset in its IPC form: - /// 4 bytes - an optional IPC_CONTINUATION_TOKEN prefix - /// 4 bytes - the byte length of the payload - /// a flatbuffer Message whose header is the Schema - var schema: Data = Data() - - /// - /// The descriptor associated with this info. - var flightDescriptor: Arrow_Flight_Protocol_FlightDescriptor { - get {return _flightDescriptor ?? Arrow_Flight_Protocol_FlightDescriptor()} - set {_flightDescriptor = newValue} - } - /// Returns true if `flightDescriptor` has been explicitly set. - var hasFlightDescriptor: Bool {return self._flightDescriptor != nil} - /// Clears the value of `flightDescriptor`. Subsequent reads from it will return its default value. - mutating func clearFlightDescriptor() {self._flightDescriptor = nil} - - /// - /// A list of endpoints associated with the flight. To consume the - /// whole flight, all endpoints (and hence all Tickets) must be - /// consumed. Endpoints can be consumed in any order. - /// - /// In other words, an application can use multiple endpoints to - /// represent partitioned data. - /// - /// If the returned data has an ordering, an application can use - /// "FlightInfo.ordered = true" or should return the all data in a - /// single endpoint. Otherwise, there is no ordering defined on - /// endpoints or the data within. - /// - /// A client can read ordered data by reading data from returned - /// endpoints, in order, from front to back. - /// - /// Note that a client may ignore "FlightInfo.ordered = true". If an - /// ordering is important for an application, an application must - /// choose one of them: - /// - /// * An application requires that all clients must read data in - /// returned endpoints order. - /// * An application must return the all data in a single endpoint. - var endpoint: [Arrow_Flight_Protocol_FlightEndpoint] = [] - - /// Set these to -1 if unknown. - var totalRecords: Int64 = 0 - - var totalBytes: Int64 = 0 - - /// - /// FlightEndpoints are in the same order as the data. - var ordered: Bool = false - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _flightDescriptor: Arrow_Flight_Protocol_FlightDescriptor? = nil -} - -/// -/// A particular stream or split associated with a flight. -struct Arrow_Flight_Protocol_FlightEndpoint { - // SwiftProtobuf.Message conformance is added in an extension below. 
See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// Token used to retrieve this stream. - var ticket: Arrow_Flight_Protocol_Ticket { - get {return _ticket ?? Arrow_Flight_Protocol_Ticket()} - set {_ticket = newValue} - } - /// Returns true if `ticket` has been explicitly set. - var hasTicket: Bool {return self._ticket != nil} - /// Clears the value of `ticket`. Subsequent reads from it will return its default value. - mutating func clearTicket() {self._ticket = nil} - - /// - /// A list of URIs where this ticket can be redeemed via DoGet(). - /// - /// If the list is empty, the expectation is that the ticket can only - /// be redeemed on the current service where the ticket was - /// generated. - /// - /// If the list is not empty, the expectation is that the ticket can - /// be redeemed at any of the locations, and that the data returned - /// will be equivalent. In this case, the ticket may only be redeemed - /// at one of the given locations, and not (necessarily) on the - /// current service. - /// - /// In other words, an application can use multiple locations to - /// represent redundant and/or load balanced services. - var location: [Arrow_Flight_Protocol_Location] = [] - - /// - /// Expiration time of this stream. If present, clients may assume - /// they can retry DoGet requests. Otherwise, it is - /// application-defined whether DoGet requests may be retried. - var expirationTime: SwiftProtobuf.Google_Protobuf_Timestamp { - get {return _expirationTime ?? SwiftProtobuf.Google_Protobuf_Timestamp()} - set {_expirationTime = newValue} - } - /// Returns true if `expirationTime` has been explicitly set. - var hasExpirationTime: Bool {return self._expirationTime != nil} - /// Clears the value of `expirationTime`. Subsequent reads from it will return its default value. - mutating func clearExpirationTime() {self._expirationTime = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _ticket: Arrow_Flight_Protocol_Ticket? = nil - fileprivate var _expirationTime: SwiftProtobuf.Google_Protobuf_Timestamp? = nil -} - -/// -/// A location where a Flight service will accept retrieval of a particular -/// stream given a ticket. -struct Arrow_Flight_Protocol_Location { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var uri: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// An opaque identifier that the service can use to retrieve a particular -/// portion of a stream. -/// -/// Tickets are meant to be single use. It is an error/application-defined -/// behavior to reuse a ticket. -struct Arrow_Flight_Protocol_Ticket { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var ticket: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// A batch of Arrow data as part of a stream of batches. -struct Arrow_Flight_Protocol_FlightData { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// The descriptor of the data. 
This is only relevant when a client is - /// starting a new DoPut stream. - var flightDescriptor: Arrow_Flight_Protocol_FlightDescriptor { - get {return _flightDescriptor ?? Arrow_Flight_Protocol_FlightDescriptor()} - set {_flightDescriptor = newValue} - } - /// Returns true if `flightDescriptor` has been explicitly set. - var hasFlightDescriptor: Bool {return self._flightDescriptor != nil} - /// Clears the value of `flightDescriptor`. Subsequent reads from it will return its default value. - mutating func clearFlightDescriptor() {self._flightDescriptor = nil} - - /// - /// Header for message data as described in Message.fbs::Message. - var dataHeader: Data = Data() - - /// - /// Application-defined metadata. - var appMetadata: Data = Data() - - /// - /// The actual batch of Arrow data. Preferably handled with minimal-copies - /// coming last in the definition to help with sidecar patterns (it is - /// expected that some implementations will fetch this field off the wire - /// with specialized code to avoid extra memory copies). - var dataBody: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _flightDescriptor: Arrow_Flight_Protocol_FlightDescriptor? = nil -} - -///* -/// The response message associated with the submission of a DoPut. -struct Arrow_Flight_Protocol_PutResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var appMetadata: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -#if swift(>=5.5) && canImport(_Concurrency) -extension Arrow_Flight_Protocol_CancelStatus: @unchecked Sendable {} -extension Arrow_Flight_Protocol_HandshakeRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_HandshakeResponse: @unchecked Sendable {} -extension Arrow_Flight_Protocol_BasicAuth: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Empty: @unchecked Sendable {} -extension Arrow_Flight_Protocol_ActionType: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Criteria: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Action: @unchecked Sendable {} -extension Arrow_Flight_Protocol_CancelFlightInfoRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_RenewFlightEndpointRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Result: @unchecked Sendable {} -extension Arrow_Flight_Protocol_CancelFlightInfoResult: @unchecked Sendable {} -extension Arrow_Flight_Protocol_SchemaResult: @unchecked Sendable {} -extension Arrow_Flight_Protocol_FlightDescriptor: @unchecked Sendable {} -extension Arrow_Flight_Protocol_FlightDescriptor.DescriptorType: @unchecked Sendable {} -extension Arrow_Flight_Protocol_FlightInfo: @unchecked Sendable {} -extension Arrow_Flight_Protocol_FlightEndpoint: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Location: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Ticket: @unchecked Sendable {} -extension Arrow_Flight_Protocol_FlightData: @unchecked Sendable {} -extension Arrow_Flight_Protocol_PutResult: @unchecked Sendable {} -#endif // swift(>=5.5) && canImport(_Concurrency) - -// MARK: - Code below here is support for the SwiftProtobuf runtime. 
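For context, and not part of the generated sources being removed above: a minimal sketch of how the deleted async client could have been driven, assuming a configured grpc-swift GRPCChannel; the function fetchExampleFlight and the "example" dataset path are hypothetical.

import Foundation
import GRPC

// Hypothetical usage sketch for the Arrow Flight async client removed above.
// Assumes `channel` is a GRPCChannel already connected to a Flight service.
@available(macOS 10.15, iOS 13, tvOS 13, watchOS 6, *)
func fetchExampleFlight(channel: GRPCChannel) async throws {
  let client = Arrow_Flight_Protocol_FlightServiceAsyncClient(channel: channel)

  // Build a PATH-type descriptor naming a dataset (the name is illustrative).
  var descriptor = Arrow_Flight_Protocol_FlightDescriptor()
  descriptor.type = .path
  descriptor.path = ["example"]

  // Unary GetFlightInfo resolves the descriptor to endpoints and tickets.
  let info = try await client.getFlightInfo(descriptor)

  // Redeem each endpoint's ticket with the server-streaming DoGet call.
  for endpoint in info.endpoint {
    for try await data in client.doGet(endpoint.ticket) {
      _ = data.dataBody  // raw Arrow record batch bytes for this message
    }
  }
}

On the server side, the same patch removes the matching Arrow_Flight_Protocol_FlightServiceAsyncProvider protocol, which a service implementation would conform to in order to serve these calls.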
- -fileprivate let _protobuf_package = "arrow.flight.protocol" - -extension Arrow_Flight_Protocol_CancelStatus: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "CANCEL_STATUS_UNSPECIFIED"), - 1: .same(proto: "CANCEL_STATUS_CANCELLED"), - 2: .same(proto: "CANCEL_STATUS_CANCELLING"), - 3: .same(proto: "CANCEL_STATUS_NOT_CANCELLABLE"), - ] -} - -extension Arrow_Flight_Protocol_HandshakeRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".HandshakeRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "protocol_version"), - 2: .same(proto: "payload"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularUInt64Field(value: &self.protocolVersion) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self.payload) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if self.protocolVersion != 0 { - try visitor.visitSingularUInt64Field(value: self.protocolVersion, fieldNumber: 1) - } - if !self.payload.isEmpty { - try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_HandshakeRequest, rhs: Arrow_Flight_Protocol_HandshakeRequest) -> Bool { - if lhs.protocolVersion != rhs.protocolVersion {return false} - if lhs.payload != rhs.payload {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_HandshakeResponse: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".HandshakeResponse" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "protocol_version"), - 2: .same(proto: "payload"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularUInt64Field(value: &self.protocolVersion) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self.payload) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if self.protocolVersion != 0 { - try visitor.visitSingularUInt64Field(value: self.protocolVersion, fieldNumber: 1) - } - if !self.payload.isEmpty { - try visitor.visitSingularBytesField(value: self.payload, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_HandshakeResponse, rhs: Arrow_Flight_Protocol_HandshakeResponse) -> Bool { - if lhs.protocolVersion != rhs.protocolVersion {return false} - if lhs.payload != rhs.payload {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_BasicAuth: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".BasicAuth" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 2: .same(proto: "username"), - 3: .same(proto: "password"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 2: try { try decoder.decodeSingularStringField(value: &self.username) }() - case 3: try { try decoder.decodeSingularStringField(value: &self.password) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.username.isEmpty { - try visitor.visitSingularStringField(value: self.username, fieldNumber: 2) - } - if !self.password.isEmpty { - try visitor.visitSingularStringField(value: self.password, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_BasicAuth, rhs: Arrow_Flight_Protocol_BasicAuth) -> Bool { - if lhs.username != rhs.username {return false} - if lhs.password != rhs.password {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Empty: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".Empty" - static let _protobuf_nameMap = SwiftProtobuf._NameMap() - - mutating func decodeMessage(decoder: inout D) throws { - while let _ = try decoder.nextFieldNumber() { - } - } - - func traverse(visitor: inout V) throws { - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Empty, rhs: Arrow_Flight_Protocol_Empty) -> Bool { - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_ActionType: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionType" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "type"), - 2: .same(proto: "description"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline 
closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.type) }() - case 2: try { try decoder.decodeSingularStringField(value: &self.description_p) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.type.isEmpty { - try visitor.visitSingularStringField(value: self.type, fieldNumber: 1) - } - if !self.description_p.isEmpty { - try visitor.visitSingularStringField(value: self.description_p, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_ActionType, rhs: Arrow_Flight_Protocol_ActionType) -> Bool { - if lhs.type != rhs.type {return false} - if lhs.description_p != rhs.description_p {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Criteria: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".Criteria" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "expression"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.expression) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.expression.isEmpty { - try visitor.visitSingularBytesField(value: self.expression, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Criteria, rhs: Arrow_Flight_Protocol_Criteria) -> Bool { - if lhs.expression != rhs.expression {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Action: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".Action" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "type"), - 2: .same(proto: "body"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.type) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self.body) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.type.isEmpty { - try visitor.visitSingularStringField(value: self.type, fieldNumber: 1) - } - if !self.body.isEmpty { - try visitor.visitSingularBytesField(value: self.body, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Action, rhs: Arrow_Flight_Protocol_Action) -> Bool { - if lhs.type != rhs.type {return false} - if lhs.body != rhs.body {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_CancelFlightInfoRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CancelFlightInfoRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "info"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularMessageField(value: &self._info) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._info { - try visitor.visitSingularMessageField(value: v, fieldNumber: 1) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_CancelFlightInfoRequest, rhs: Arrow_Flight_Protocol_CancelFlightInfoRequest) -> Bool { - if lhs._info != rhs._info {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_RenewFlightEndpointRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".RenewFlightEndpointRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "endpoint"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularMessageField(value: &self._endpoint) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._endpoint { - try visitor.visitSingularMessageField(value: v, fieldNumber: 1) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_RenewFlightEndpointRequest, rhs: Arrow_Flight_Protocol_RenewFlightEndpointRequest) -> Bool { - if lhs._endpoint != rhs._endpoint {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Result: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".Result" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "body"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.body) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.body.isEmpty { - try visitor.visitSingularBytesField(value: self.body, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Result, rhs: Arrow_Flight_Protocol_Result) -> Bool { - if lhs.body != rhs.body {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_CancelFlightInfoResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CancelFlightInfoResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "status"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularEnumField(value: &self.status) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if self.status != .unspecified { - try visitor.visitSingularEnumField(value: self.status, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_CancelFlightInfoResult, rhs: Arrow_Flight_Protocol_CancelFlightInfoResult) -> Bool { - if lhs.status != rhs.status {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_SchemaResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".SchemaResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "schema"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.schema) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.schema.isEmpty { - try visitor.visitSingularBytesField(value: self.schema, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_SchemaResult, rhs: Arrow_Flight_Protocol_SchemaResult) -> Bool { - if lhs.schema != rhs.schema {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_FlightDescriptor: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".FlightDescriptor" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "type"), - 2: .same(proto: "cmd"), - 3: .same(proto: "path"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularEnumField(value: &self.type) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self.cmd) }() - case 3: try { try decoder.decodeRepeatedStringField(value: &self.path) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if self.type != .unknown { - try visitor.visitSingularEnumField(value: self.type, fieldNumber: 1) - } - if !self.cmd.isEmpty { - try visitor.visitSingularBytesField(value: self.cmd, fieldNumber: 2) - } - if !self.path.isEmpty { - try visitor.visitRepeatedStringField(value: self.path, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_FlightDescriptor, rhs: Arrow_Flight_Protocol_FlightDescriptor) -> Bool { - if lhs.type != rhs.type {return false} - if lhs.cmd != rhs.cmd {return false} - if lhs.path != rhs.path {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_FlightDescriptor.DescriptorType: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "UNKNOWN"), - 1: .same(proto: "PATH"), - 2: .same(proto: "CMD"), - ] -} - -extension Arrow_Flight_Protocol_FlightInfo: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".FlightInfo" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "schema"), - 2: .standard(proto: "flight_descriptor"), - 3: .same(proto: "endpoint"), - 4: .standard(proto: "total_records"), - 5: .standard(proto: "total_bytes"), - 6: .same(proto: "ordered"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.schema) }() - case 2: try { try decoder.decodeSingularMessageField(value: &self._flightDescriptor) }() - case 3: try { try decoder.decodeRepeatedMessageField(value: &self.endpoint) }() - case 4: try { try decoder.decodeSingularInt64Field(value: &self.totalRecords) }() - case 5: try { try decoder.decodeSingularInt64Field(value: &self.totalBytes) }() - case 6: try { try decoder.decodeSingularBoolField(value: &self.ordered) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - if !self.schema.isEmpty { - try visitor.visitSingularBytesField(value: self.schema, fieldNumber: 1) - } - try { if let v = self._flightDescriptor { - try visitor.visitSingularMessageField(value: v, fieldNumber: 2) - } }() - if !self.endpoint.isEmpty { - try visitor.visitRepeatedMessageField(value: self.endpoint, fieldNumber: 3) - } - if self.totalRecords != 0 { - try visitor.visitSingularInt64Field(value: self.totalRecords, fieldNumber: 4) - } - if self.totalBytes != 0 { - try visitor.visitSingularInt64Field(value: self.totalBytes, fieldNumber: 5) - } - if self.ordered != false { - try visitor.visitSingularBoolField(value: self.ordered, fieldNumber: 6) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_FlightInfo, rhs: Arrow_Flight_Protocol_FlightInfo) -> Bool { - if lhs.schema != rhs.schema {return false} - if lhs._flightDescriptor != rhs._flightDescriptor {return false} - if lhs.endpoint != rhs.endpoint {return false} - if lhs.totalRecords != rhs.totalRecords {return false} - if lhs.totalBytes != rhs.totalBytes {return false} - if lhs.ordered != rhs.ordered {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_FlightEndpoint: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".FlightEndpoint" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "ticket"), - 2: .same(proto: "location"), - 3: .standard(proto: "expiration_time"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularMessageField(value: &self._ticket) }() - case 2: try { try decoder.decodeRepeatedMessageField(value: &self.location) }() - case 3: try { try decoder.decodeSingularMessageField(value: &self._expirationTime) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._ticket { - try visitor.visitSingularMessageField(value: v, fieldNumber: 1) - } }() - if !self.location.isEmpty { - try visitor.visitRepeatedMessageField(value: self.location, fieldNumber: 2) - } - try { if let v = self._expirationTime { - try visitor.visitSingularMessageField(value: v, fieldNumber: 3) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_FlightEndpoint, rhs: Arrow_Flight_Protocol_FlightEndpoint) -> Bool { - if lhs._ticket != rhs._ticket {return false} - if lhs.location != rhs.location {return false} - if lhs._expirationTime != rhs._expirationTime {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Location: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".Location" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "uri"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.uri) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.uri.isEmpty { - try visitor.visitSingularStringField(value: self.uri, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Location, rhs: Arrow_Flight_Protocol_Location) -> Bool { - if lhs.uri != rhs.uri {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Ticket: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".Ticket" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "ticket"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.ticket) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.ticket.isEmpty { - try visitor.visitSingularBytesField(value: self.ticket, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Ticket, rhs: Arrow_Flight_Protocol_Ticket) -> Bool { - if lhs.ticket != rhs.ticket {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_FlightData: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".FlightData" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "flight_descriptor"), - 2: .standard(proto: "data_header"), - 3: .standard(proto: "app_metadata"), - 1000: .standard(proto: "data_body"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularMessageField(value: &self._flightDescriptor) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self.dataHeader) }() - case 3: try { try decoder.decodeSingularBytesField(value: &self.appMetadata) }() - case 1000: try { try decoder.decodeSingularBytesField(value: &self.dataBody) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._flightDescriptor { - try visitor.visitSingularMessageField(value: v, fieldNumber: 1) - } }() - if !self.dataHeader.isEmpty { - try visitor.visitSingularBytesField(value: self.dataHeader, fieldNumber: 2) - } - if !self.appMetadata.isEmpty { - try visitor.visitSingularBytesField(value: self.appMetadata, fieldNumber: 3) - } - if !self.dataBody.isEmpty { - try visitor.visitSingularBytesField(value: self.dataBody, fieldNumber: 1000) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_FlightData, rhs: Arrow_Flight_Protocol_FlightData) -> Bool { - if lhs._flightDescriptor != rhs._flightDescriptor {return false} - if lhs.dataHeader != rhs.dataHeader {return false} - if lhs.appMetadata != rhs.appMetadata {return false} - if lhs.dataBody != rhs.dataBody {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_PutResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".PutResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "app_metadata"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.appMetadata) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.appMetadata.isEmpty { - try visitor.visitSingularBytesField(value: self.appMetadata, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_PutResult, rhs: Arrow_Flight_Protocol_PutResult) -> Bool { - if lhs.appMetadata != rhs.appMetadata {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightAction.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightAction.swift deleted file mode 100644 index 8db12aaa99f..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightAction.swift +++ /dev/null @@ -1,39 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
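Beyond binary coding, the `_protobuf_nameMap` tables in the extensions above also drive SwiftProtobuf's JSON coding. A small sketch using the standard `jsonString()` API (the metadata value is a placeholder; bytes fields are base64-encoded in protobuf JSON):

import Foundation
import SwiftProtobuf

func putResultAsJSON() throws -> String {
    var result = Arrow_Flight_Protocol_PutResult()
    result.appMetadata = Data("ack".utf8)  // placeholder metadata

    // The .standard(proto: "app_metadata") name-map entry yields the
    // camelCase JSON key; "ack" base64-encodes to "YWNr", so this
    // returns {"appMetadata":"YWNr"}
    return try result.jsonString()
}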
- -import Foundation - -public class FlightAction { - public let type: String - public let body: Data - init(_ action: Arrow_Flight_Protocol_Action) { - self.type = action.type - self.body = action.body - } - - public init(_ type: String, body: Data = Data()) { - self.type = type - self.body = body - } - - func toProtocol() -> Arrow_Flight_Protocol_Action { - var flightAction = Arrow_Flight_Protocol_Action() - flightAction.type = self.type - flightAction.body = self.body - return flightAction - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightActionType.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightActionType.swift deleted file mode 100644 index 0b4778c6864..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightActionType.swift +++ /dev/null @@ -1,38 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -public class FlightActionType { - public let type: String - public let description: String - init(_ actionType: Arrow_Flight_Protocol_ActionType) { - self.type = actionType.type - self.description = actionType.description_p - - } - public init(_ type: String, description: String) { - self.type = type - self.description = description - } - - func toProtocol() -> Arrow_Flight_Protocol_ActionType { - var actionType = Arrow_Flight_Protocol_ActionType() - actionType.type = self.type - actionType.description_p = self.description - return actionType - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightClient.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightClient.swift deleted file mode 100644 index ef3e4fa239e..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightClient.swift +++ /dev/null @@ -1,187 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
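FlightAction and FlightActionType are thin wrappers over the generated messages. A sketch of the mapping in both directions — note this only compiles inside the ArrowFlight module, since `toProtocol()` and the protocol-type initializer have internal access; the action name and body are illustrative:

import Foundation

// Wrap an application-defined action; servers interpret type/body freely.
let action = FlightAction("refresh-cache", body: Data("table:users".utf8))

// toProtocol() yields the generated message that actually goes on the wire.
let proto = action.toProtocol()
assert(proto.type == "refresh-cache")

// And the reverse direction, as used when decoding server responses:
let sameAction = FlightAction(proto)
assert(sameAction.body == action.body)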
-
-import struct Foundation.Data
-import struct Foundation.URL
-import GRPC
-import NIOCore
-import NIOPosix
-import Arrow
-
-public class FlightClient {
-    let client: Arrow_Flight_Protocol_FlightServiceAsyncClient
-    let allowReadingUnalignedBuffers: Bool
-
-    public init(channel: GRPCChannel, allowReadingUnalignedBuffers: Bool = false ) {
-        client = Arrow_Flight_Protocol_FlightServiceAsyncClient(channel: channel)
-        self.allowReadingUnalignedBuffers = allowReadingUnalignedBuffers
-    }
-
-    private func readMessages(
-        _ responseStream: GRPCAsyncResponseStream<Arrow_Flight_Protocol_FlightData>
-    ) async throws -> ArrowReader.ArrowReaderResult {
-        let reader = ArrowReader()
-        let arrowResult = ArrowReader.makeArrowReaderResult()
-        for try await data in responseStream {
-            switch reader.fromMessage(
-                data.dataHeader,
-                dataBody: data.dataBody,
-                result: arrowResult,
-                useUnalignedBuffers: allowReadingUnalignedBuffers) {
-            case .success:
-                continue
-            case .failure(let error):
-                throw error
-            }
-        }
-
-        return arrowResult
-    }
-
-    private func writeBatches(
-        _ requestStream: GRPCAsyncRequestStreamWriter<Arrow_Flight_Protocol_FlightData>,
-        descriptor: FlightDescriptor,
-        recordBatches: [RecordBatch]
-    ) async throws {
-        let writer = ArrowWriter()
-        switch writer.toMessage(recordBatches[0].schema) {
-        case .success(let schemaData):
-            try await requestStream.send(
-                FlightData(
-                    schemaData,
-                    dataBody: Data(),
-                    flightDescriptor: descriptor).toProtocol())
-            for recordBatch in recordBatches {
-                switch writer.toMessage(recordBatch) {
-                case .success(let data):
-                    try await requestStream.send(
-                        FlightData(
-                            data[0],
-                            dataBody: data[1],
-                            flightDescriptor: descriptor).toProtocol())
-                case .failure(let error):
-                    throw error
-                }
-            }
-            requestStream.finish()
-        case .failure(let error):
-            throw error
-        }
-    }
-
-    public func listActions(_ closure: (FlightActionType) -> Void) async throws {
-        let listActions = client.makeListActionsCall(Arrow_Flight_Protocol_Empty())
-        for try await data in listActions.responseStream {
-            closure(FlightActionType(data))
-        }
-    }
-
-    public func listFlights(
-        _ criteria: FlightCriteria,
-        closure: (FlightInfo) throws -> Void) async throws {
-        let listFlights = client.makeListFlightsCall(criteria.toProtocol())
-        for try await data in listFlights.responseStream {
-            try closure(FlightInfo(data))
-        }
-    }
-
-    public func doAction(_ action: FlightAction, closure: (FlightResult) throws -> Void) async throws {
-        let actionResponse = client.makeDoActionCall(action.toProtocol())
-        for try await data in actionResponse.responseStream {
-            try closure(FlightResult(data))
-        }
-    }
-
-    public func getSchema(_ descriptor: FlightDescriptor) async throws -> FlightSchemaResult {
-        let schemaResultResponse = client.makeGetSchemaCall(descriptor.toProtocol())
-        return FlightSchemaResult(try await schemaResultResponse.response)
-    }
-
-    public func doGet(
-        _ ticket: FlightTicket,
-        readerResultClosure: (ArrowReader.ArrowReaderResult) throws -> Void) async throws {
-        let getResult = client.makeDoGetCall(ticket.toProtocol())
-        try readerResultClosure(try await readMessages(getResult.responseStream))
-    }
-
-    public func doGet(
-        _ ticket: FlightTicket,
-        flightDataClosure: (FlightData) throws -> Void) async throws {
-        let getResult = client.makeDoGetCall(ticket.toProtocol())
-        for try await data in getResult.responseStream {
-            try flightDataClosure(FlightData(data))
-        }
-    }
-
-    public func doPut(
-        _ descriptor: FlightDescriptor,
-        recordBatches: [RecordBatch],
-        closure: (FlightPutResult) throws -> Void) async throws {
-        if recordBatches.isEmpty {
-            throw
ArrowFlightError.emptyCollection - } - - let putCall = client.makeDoPutCall() - try await writeBatches(putCall.requestStream, descriptor: descriptor, recordBatches: recordBatches) - var closureCalled = false - for try await response in putCall.responseStream { - try closure(FlightPutResult(response)) - closureCalled = true - } - - if !closureCalled { - try closure(FlightPutResult()) - } - } - - public func doPut(_ flightData: FlightData, closure: (FlightPutResult) throws -> Void) async throws { - let putCall = client.makeDoPutCall() - try await putCall.requestStream.send(flightData.toProtocol()) - putCall.requestStream.finish() - var closureCalled = false - for try await response in putCall.responseStream { - try closure(FlightPutResult(response)) - closureCalled = true - } - - if !closureCalled { - try closure(FlightPutResult()) - } - } - - public func doExchange( - _ descriptor: FlightDescriptor, - recordBatches: [RecordBatch], - closure: (ArrowReader.ArrowReaderResult) throws -> Void) async throws { - if recordBatches.isEmpty { - throw ArrowFlightError.emptyCollection - } - - let exchangeCall = client.makeDoExchangeCall() - try await writeBatches(exchangeCall.requestStream, descriptor: descriptor, recordBatches: recordBatches) - try closure(try await readMessages(exchangeCall.responseStream)) - } - - public func doExchange(flightData: FlightData, closure: (FlightData) throws -> Void) async throws { - let exchangeCall = client.makeDoExchangeCall() - try await exchangeCall.requestStream.send(flightData.toProtocol()) - exchangeCall.requestStream.finish() - for try await response in exchangeCall.responseStream { - try closure(FlightData(response)) - } - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightCriteria.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightCriteria.swift deleted file mode 100644 index 2d02959998c..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightCriteria.swift +++ /dev/null @@ -1,37 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. 
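Putting the client above together end-to-end; a minimal usage sketch, assuming a plaintext Flight service on localhost:8815, grpc-swift's channel-pool API, FlightTicket's Data initializer defined elsewhere in this module, and that ArrowReader.ArrowReaderResult exposes its decoded batches — the address and ticket bytes are illustrative:

import Foundation
import GRPC
import NIOPosix

func fetchExample() async throws {
    let group = PlatformSupport.makeEventLoopGroup(loopCount: 1)
    defer { try? group.syncShutdownGracefully() }

    let channel = try GRPCChannelPool.with(
        target: .host("localhost", port: 8815),  // assumed address
        transportSecurity: .plaintext,
        eventLoopGroup: group)
    let client = FlightClient(channel: channel)

    // Discover flights, then redeem a ticket for its record batches.
    try await client.listFlights(FlightCriteria()) { info in
        print("flight with \(info.endpoints.count) endpoint(s)")
    }
    try await client.doGet(
        FlightTicket(Data("example".utf8)),  // placeholder ticket
        readerResultClosure: { result in
            print("received \(result.batches.count) record batch(es)")
        })
}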
- -import Foundation - -public class FlightCriteria { - let criteria: Arrow_Flight_Protocol_Criteria - - public var expression: Data { criteria.expression } - public init(_ expression: Data = Data()) { - criteria = Arrow_Flight_Protocol_Criteria.with { - $0.expression = expression - } - } - - init(_ criteria: Arrow_Flight_Protocol_Criteria) { - self.criteria = criteria - } - - func toProtocol() -> Arrow_Flight_Protocol_Criteria { - return criteria - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightData.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightData.swift deleted file mode 100644 index 84db8c57183..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightData.swift +++ /dev/null @@ -1,49 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -public class FlightData { - let flightData: Arrow_Flight_Protocol_FlightData - public var flightDescriptor: FlightDescriptor? { - return flightData.hasFlightDescriptor ? FlightDescriptor(flightData.flightDescriptor) : nil - } - - public var dataHeader: Data { flightData.dataHeader } - - public var dataBody: Data { flightData.dataBody } - - init(_ flightData: Arrow_Flight_Protocol_FlightData) { - self.flightData = flightData - } - - public init(_ dataHeader: Data, dataBody: Data, flightDescriptor: FlightDescriptor? = nil) { - if flightDescriptor != nil { - self.flightData = Arrow_Flight_Protocol_FlightData.with { - $0.dataHeader = dataHeader - $0.dataBody = dataBody - $0.flightDescriptor = flightDescriptor!.toProtocol() - } - } else { - self.flightData = Arrow_Flight_Protocol_FlightData.with { - $0.dataBody = dataBody - } - } - } - - func toProtocol() -> Arrow_Flight_Protocol_FlightData { self.flightData } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightDescriptor.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightDescriptor.swift deleted file mode 100644 index 02712aaa099..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightDescriptor.swift +++ /dev/null @@ -1,56 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. 
See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -public class FlightDescriptor { - public enum DescriptorType { - case unknown - case path - case cmd - } - - public let type: FlightDescriptor.DescriptorType - public let cmd: Data - public let paths: [String] - - init(_ descriptor: Arrow_Flight_Protocol_FlightDescriptor) { - self.type = descriptor.type == .cmd ? .cmd : .path - self.cmd = descriptor.cmd - self.paths = descriptor.path - } - - public init(cmd: Data) { - self.type = .cmd - self.cmd = cmd - self.paths = [String]() - } - - public init(paths: [String]) { - self.type = .path - self.cmd = Data() - self.paths = paths - } - - func toProtocol() -> Arrow_Flight_Protocol_FlightDescriptor { - var descriptor = Arrow_Flight_Protocol_FlightDescriptor() - descriptor.type = self.type == .cmd ? .cmd : .path - descriptor.cmd = self.cmd - descriptor.path = self.paths - return descriptor - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightEndpoint.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightEndpoint.swift deleted file mode 100644 index 0493772781a..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightEndpoint.swift +++ /dev/null @@ -1,38 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -public class FlightEndpoint { - let ticket: FlightTicket - let locations: [FlightLocation] - init(_ endpoint: Arrow_Flight_Protocol_FlightEndpoint) { - self.ticket = FlightTicket(endpoint.ticket.ticket) - self.locations = endpoint.location.map {return FlightLocation($0)} - } - - public init(_ ticket: FlightTicket, locations: [FlightLocation]) { - self.ticket = ticket - self.locations = locations - } - - func toProtocol() -> Arrow_Flight_Protocol_FlightEndpoint { - var endpoint = Arrow_Flight_Protocol_FlightEndpoint() - endpoint.ticket = self.ticket.toProtocol() - endpoint.location = self.locations.map { $0.toProtocol() } - return endpoint - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightInfo.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightInfo.swift deleted file mode 100644 index eb43aa34caf..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightInfo.swift +++ /dev/null @@ -1,57 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -import Arrow - -public class FlightInfo { - let flightInfo: Arrow_Flight_Protocol_FlightInfo - public var flightDescriptor: FlightDescriptor? { - return flightInfo.hasFlightDescriptor ? FlightDescriptor(flightInfo.flightDescriptor) : nil - } - - public var endpoints: [FlightEndpoint] { - return self.flightInfo.endpoint.map { FlightEndpoint($0) } - } - public var schema: ArrowSchema? { - return schemaFromMessage(self.flightInfo.schema) - } - - var endpoint: [Arrow_Flight_Protocol_FlightEndpoint] = [] - init(_ flightInfo: Arrow_Flight_Protocol_FlightInfo) { - self.flightInfo = flightInfo - } - - public init(_ schema: Data, endpoints: [FlightEndpoint] = [FlightEndpoint](), descriptor: FlightDescriptor? = nil) { - if let localDescriptor = descriptor { - self.flightInfo = Arrow_Flight_Protocol_FlightInfo.with { - $0.schema = schema - $0.flightDescriptor = localDescriptor.toProtocol() - $0.endpoint = endpoints.map { $0.toProtocol() } - } - } else { - self.flightInfo = Arrow_Flight_Protocol_FlightInfo.with { - $0.schema = schema - $0.endpoint = endpoints.map { $0.toProtocol() } - } - } - } - - func toProtocol() -> Arrow_Flight_Protocol_FlightInfo { - return self.flightInfo - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightLocation.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightLocation.swift deleted file mode 100644 index 9c89d100336..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightLocation.swift +++ /dev/null @@ -1,36 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -public class FlightLocation { - public let uri: String - - init(_ location: Arrow_Flight_Protocol_Location) { - self.uri = location.uri - } - - public init(_ uri: String) { - self.uri = uri - } - - func toProtocol() -> Arrow_Flight_Protocol_Location { - var location = Arrow_Flight_Protocol_Location() - location.uri = uri - return location - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightPutResult.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightPutResult.swift deleted file mode 100644 index 3b22f8f0baf..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightPutResult.swift +++ /dev/null @@ -1,35 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. 
See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -public class FlightPutResult { - public let appMetadata: Data - public init(_ appMetadata: Data = Data()) { - self.appMetadata = appMetadata - } - - init(_ putResult: Arrow_Flight_Protocol_PutResult) { - self.appMetadata = putResult.appMetadata - } - - func toProtocol() -> Arrow_Flight_Protocol_PutResult { - var putResult = Arrow_Flight_Protocol_PutResult() - putResult.appMetadata = self.appMetadata - return putResult - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightResult.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightResult.swift deleted file mode 100644 index d7cf828b963..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightResult.swift +++ /dev/null @@ -1,35 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation - -public class FlightResult { - public let body: Data - init(_ result: Arrow_Flight_Protocol_Result) { - self.body = result.body - } - - public init(_ body: Data) { - self.body = body - } - - func toProtocol() -> Arrow_Flight_Protocol_Result { - var result = Arrow_Flight_Protocol_Result() - result.body = self.body - return result - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightSchemaResult.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightSchemaResult.swift deleted file mode 100644 index 7dea98a9988..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightSchemaResult.swift +++ /dev/null @@ -1,41 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. 
You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -import Arrow - -public class FlightSchemaResult { - let schemaResult: Arrow_Flight_Protocol_SchemaResult - - public var schema: ArrowSchema? { - return schemaFromMessage(self.schemaResult.schema) - } - - public init(_ schema: Data) { - self.schemaResult = Arrow_Flight_Protocol_SchemaResult.with { - $0.schema = schema - } - } - - init(_ schemaResult: Arrow_Flight_Protocol_SchemaResult) { - self.schemaResult = schemaResult - } - - func toProtocol() -> Arrow_Flight_Protocol_SchemaResult { - return schemaResult - } -} diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightServer.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightServer.swift deleted file mode 100644 index 19644d632e9..00000000000 --- a/swift/ArrowFlight/Sources/ArrowFlight/FlightServer.swift +++ /dev/null @@ -1,194 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Foundation -import GRPC -import NIO -import NIOConcurrencyHelpers -import SwiftProtobuf -import Arrow - -public enum ArrowFlightError: Error { - case unknown(String?) - case notImplemented(String? = nil) - case emptyCollection - case ioError(String? = nil) -} - -public func schemaToMessage(_ schema: ArrowSchema) throws -> Data { - let arrowWriter = ArrowWriter() - switch arrowWriter.toMessage(schema) { - case .success(let result): - var outputResult = Data() - withUnsafeBytes(of: Int32(0).littleEndian) {outputResult.append(Data($0))} - withUnsafeBytes(of: Int32(result.count).littleEndian) {outputResult.append(Data($0))} - outputResult.append(result) - return outputResult - case .failure(let error): - throw error - } -} - -public func schemaFromMessage(_ schemaData: Data) -> ArrowSchema? { - let messageLength = schemaData.withUnsafeBytes { rawBuffer in - rawBuffer.loadUnaligned(fromByteOffset: 4, as: Int32.self) - } - - let startIndex = schemaData.count - Int(messageLength) - let schema = schemaData[startIndex...] - - let reader = ArrowReader() - let result = ArrowReader.makeArrowReaderResult() - switch reader.fromMessage(schema, dataBody: Data(), result: result) { - case .success: - return result.schema! 
-
-public protocol ArrowFlightServer: Sendable {
-    var allowReadingUnalignedBuffers: Bool { get }
-    func listFlights(_ criteria: FlightCriteria, writer: FlightInfoStreamWriter) async throws
-    func getFlightInfo(_ request: FlightDescriptor) async throws -> FlightInfo
-    func getSchema(_ request: FlightDescriptor) async throws -> ArrowFlight.FlightSchemaResult
-    func listActions(_ writer: ActionTypeStreamWriter) async throws
-    func doAction(_ action: FlightAction, writer: ResultStreamWriter) async throws
-    func doGet(_ ticket: FlightTicket, writer: RecordBatchStreamWriter) async throws
-    func doPut(_ reader: RecordBatchStreamReader, writer: PutResultDataStreamWriter) async throws
-    func doExchange(_ reader: RecordBatchStreamReader, writer: RecordBatchStreamWriter) async throws
-}
-
-extension ArrowFlightServer {
-    var allowReadingUnalignedBuffers: Bool {
-        return false
-    }
-}
-
-public func makeFlightServer(_ handler: ArrowFlightServer) -> CallHandlerProvider {
-    return InternalFlightServer(handler)
-}
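Since the ArrowFlightServer protocol and its makeFlightServer factory are going away, a minimal conformance is sketched here for reference. NoopFlightServer is a hypothetical name; every RPC simply reports notImplemented, matching the fallback behavior of InternalFlightServer below when no handler is attached:

    import Arrow
    import ArrowFlight

    struct NoopFlightServer: ArrowFlightServer {
        // Restate the default explicitly; the protocol extension's default is internal.
        var allowReadingUnalignedBuffers: Bool { false }
        func listFlights(_ criteria: FlightCriteria, writer: FlightInfoStreamWriter) async throws {
            throw ArrowFlightError.notImplemented()
        }
        func getFlightInfo(_ request: FlightDescriptor) async throws -> FlightInfo {
            throw ArrowFlightError.notImplemented()
        }
        func getSchema(_ request: FlightDescriptor) async throws -> ArrowFlight.FlightSchemaResult {
            throw ArrowFlightError.notImplemented()
        }
        func listActions(_ writer: ActionTypeStreamWriter) async throws {
            throw ArrowFlightError.notImplemented()
        }
        func doAction(_ action: FlightAction, writer: ResultStreamWriter) async throws {
            throw ArrowFlightError.notImplemented()
        }
        func doGet(_ ticket: FlightTicket, writer: RecordBatchStreamWriter) async throws {
            throw ArrowFlightError.notImplemented()
        }
        func doPut(_ reader: RecordBatchStreamReader, writer: PutResultDataStreamWriter) async throws {
            throw ArrowFlightError.notImplemented()
        }
        func doExchange(_ reader: RecordBatchStreamReader, writer: RecordBatchStreamWriter) async throws {
            throw ArrowFlightError.notImplemented()
        }
    }

    // The handler is wrapped into a gRPC CallHandlerProvider for registration.
    let provider = makeFlightServer(NoopFlightServer())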
-internal final class InternalFlightServer: Arrow_Flight_Protocol_FlightServiceAsyncProvider {
-    let arrowFlightServer: ArrowFlightServer?
-
-    init(_ arrowFlightServer: ArrowFlightServer?) {
-        self.arrowFlightServer = arrowFlightServer
-    }
-
-    func handshake(requestStream: GRPC.GRPCAsyncRequestStream<Arrow_Flight_Protocol_HandshakeRequest>,
-                   responseStream: GRPC.GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_HandshakeResponse>,
-                   context: GRPC.GRPCAsyncServerCallContext) async throws {
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func listFlights(request: Arrow_Flight_Protocol_Criteria,
-                     responseStream: GRPC.GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_FlightInfo>,
-                     context: GRPC.GRPCAsyncServerCallContext) async throws {
-        if let server = arrowFlightServer {
-            let writer = FlightInfoStreamWriter(responseStream)
-            try await server.listFlights(FlightCriteria(request), writer: writer)
-            return
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func getFlightInfo(request: Arrow_Flight_Protocol_FlightDescriptor,
-                       context: GRPC.GRPCAsyncServerCallContext) async throws -> Arrow_Flight_Protocol_FlightInfo {
-        if let server = arrowFlightServer {
-            return try await server.getFlightInfo(FlightDescriptor(request)).toProtocol()
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func getSchema(request: Arrow_Flight_Protocol_FlightDescriptor,
-                   context: GRPC.GRPCAsyncServerCallContext) async throws -> Arrow_Flight_Protocol_SchemaResult {
-        if let server = arrowFlightServer {
-            return try await server.getSchema(FlightDescriptor(request)).toProtocol()
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func doGet(request: Arrow_Flight_Protocol_Ticket,
-               responseStream: GRPC.GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_FlightData>,
-               context: GRPC.GRPCAsyncServerCallContext) async throws {
-        if let server = arrowFlightServer {
-            let writer = RecordBatchStreamWriter(responseStream)
-            let ticket = FlightTicket(request)
-            try await server.doGet(ticket, writer: writer)
-            return
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func doPut(requestStream: GRPC.GRPCAsyncRequestStream<Arrow_Flight_Protocol_FlightData>,
-               responseStream: GRPC.GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_PutResult>,
-               context: GRPC.GRPCAsyncServerCallContext) async throws {
-        if let server = arrowFlightServer {
-            let reader = RecordBatchStreamReader(requestStream)
-            let writer = PutResultDataStreamWriter(responseStream)
-            try await server.doPut(reader, writer: writer)
-            return
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func doExchange(requestStream: GRPC.GRPCAsyncRequestStream<Arrow_Flight_Protocol_FlightData>,
-                    responseStream: GRPC.GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_FlightData>,
-                    context: GRPC.GRPCAsyncServerCallContext) async throws {
-        if let server = arrowFlightServer {
-            let reader = RecordBatchStreamReader(requestStream)
-            let writer = RecordBatchStreamWriter(responseStream)
-            try await server.doExchange(reader, writer: writer)
-            return
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func doAction(request: Arrow_Flight_Protocol_Action,
-                  responseStream: GRPC.GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_Result>,
-                  context: GRPC.GRPCAsyncServerCallContext) async throws {
-        if let server = arrowFlightServer {
-            try await server.doAction(FlightAction(request), writer: ResultStreamWriter(responseStream))
-            return
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    func listActions(request: Arrow_Flight_Protocol_Empty,
-                     responseStream: GRPC.GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_ActionType>,
-                     context: GRPC.GRPCAsyncServerCallContext) async throws {
-        if let server = arrowFlightServer {
-            let writer = ActionTypeStreamWriter(responseStream)
-            try await server.listActions(writer)
-            return
-        }
-
-        throw ArrowFlightError.notImplemented()
-    }
-
-    internal var interceptors: Arrow_Flight_Protocol_FlightServiceServerInterceptorFactoryProtocol? { return nil }
-
-}
diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightSql.pb.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightSql.pb.swift
deleted file mode 100644
index 18b839fcbc0..00000000000
--- a/swift/ArrowFlight/Sources/ArrowFlight/FlightSql.pb.swift
+++ /dev/null
@@ -1,5145 +0,0 @@
-// DO NOT EDIT.
-// swift-format-ignore-file
-//
-// Generated by the Swift generator plugin for the protocol buffer compiler.
-// Source: FlightSql.proto
-//
-// For information on using the generated types, please see the documentation:
-// https://github.com/apple/swift-protobuf/
-
-//
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-import Foundation
-import SwiftProtobuf
-
-// If the compiler emits an error on this type, it is because this file
-// was generated by a version of the `protoc` Swift plug-in that is
-// incompatible with the version of SwiftProtobuf to which you are linking.
-// Please ensure that you are building against the same version of the API
-// that was used to generate this file.
-fileprivate struct _GeneratedWithProtocGenSwiftVersion: SwiftProtobuf.ProtobufAPIVersionCheck {
-  struct _2: SwiftProtobuf.ProtobufAPIVersion_2 {}
-  typealias Version = _2
-}
-
-/// Options for CommandGetSqlInfo.
-enum Arrow_Flight_Protocol_Sql_SqlInfo: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-
-  /// Retrieves a UTF-8 string with the name of the Flight SQL Server.
-  case flightSqlServerName // = 0
-
-  /// Retrieves a UTF-8 string with the native version of the Flight SQL Server.
-  case flightSqlServerVersion // = 1
-
-  /// Retrieves a UTF-8 string with the Arrow format version of the Flight SQL Server.
-  case flightSqlServerArrowVersion // = 2
-
-  ///
-  /// Retrieves a boolean value indicating whether the Flight SQL Server is read only.
-  ///
-  /// Returns:
-  /// - false: if read-write
-  /// - true: if read only
-  case flightSqlServerReadOnly // = 3
-
-  ///
-  /// Retrieves a boolean value indicating whether the Flight SQL Server supports executing
-  /// SQL queries.
-  ///
-  /// Note that the absence of this info (as opposed to a false value) does not necessarily
-  /// mean that SQL is not supported, as this property was not originally defined.
-  case flightSqlServerSql // = 4
-
-  ///
-  /// Retrieves a boolean value indicating whether the Flight SQL Server supports executing
-  /// Substrait plans.
-  case flightSqlServerSubstrait // = 5
-
-  ///
-  /// Retrieves a string value indicating the minimum supported Substrait version, or null
-  /// if Substrait is not supported.
-  case flightSqlServerSubstraitMinVersion // = 6
-
-  ///
-  /// Retrieves a string value indicating the maximum supported Substrait version, or null
-  /// if Substrait is not supported.
-  case flightSqlServerSubstraitMaxVersion // = 7
-
-  ///
-  /// Retrieves an int32 indicating whether the Flight SQL Server supports the
-  /// BeginTransaction/EndTransaction/BeginSavepoint/EndSavepoint actions.
-  ///
-  /// Even if this is not supported, the database may still support explicit "BEGIN
-  /// TRANSACTION"/"COMMIT" SQL statements (see SQL_TRANSACTIONS_SUPPORTED); this property
-  /// is only about whether the server implements the Flight SQL API endpoints.
-  ///
-  /// The possible values are listed in `SqlSupportedTransaction`.
-  case flightSqlServerTransaction // = 8
-
-  ///
-  /// Retrieves a boolean value indicating whether the Flight SQL Server supports explicit
-  /// query cancellation (the CancelQuery action).
-  case flightSqlServerCancel // = 9
-
-  ///
-  /// Retrieves an int32 indicating the timeout (in milliseconds) for prepared statement handles.
-  ///
-  /// If 0, there is no timeout. Servers should reset the timeout when the handle is used in a command.
-  case flightSqlServerStatementTimeout // = 100
-
-  ///
-  /// Retrieves an int32 indicating the timeout (in milliseconds) for transactions, since transactions are not tied to a connection.
-  ///
-  /// If 0, there is no timeout. Servers should reset the timeout when the handle is used in a command.
-  case flightSqlServerTransactionTimeout // = 101
-
-  ///
-  /// Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of catalogs.
-  ///
-  /// Returns:
-  /// - false: if it doesn't support CREATE and DROP of catalogs.
-  /// - true: if it supports CREATE and DROP of catalogs.
-  case sqlDdlCatalog // = 500
-
-  ///
-  /// Retrieves a boolean value indicating whether the Flight SQL Server supports CREATE and DROP of schemas.
-  ///
-  /// Returns:
-  /// - false: if it doesn't support CREATE and DROP of schemas.
-  /// - true: if it supports CREATE and DROP of schemas.
-  case sqlDdlSchema // = 501
-
-  ///
-  /// Indicates whether the Flight SQL Server supports CREATE and DROP of tables.
-  ///
-  /// Returns:
-  /// - false: if it doesn't support CREATE and DROP of tables.
-  /// - true: if it supports CREATE and DROP of tables.
-  case sqlDdlTable // = 502
-
-  ///
-  /// Retrieves a int32 ordinal representing the case sensitivity of catalog, table, schema and table names.
-  ///
-  /// The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`.
-  case sqlIdentifierCase // = 503
-
-  /// Retrieves a UTF-8 string with the supported character(s) used to surround a delimited identifier.
-  case sqlIdentifierQuoteChar // = 504
-
-  ///
-  /// Retrieves a int32 describing the case sensitivity of quoted identifiers.
-  ///
-  /// The possible values are listed in `arrow.flight.protocol.sql.SqlSupportedCaseSensitivity`.
-  case sqlQuotedIdentifierCase // = 505
-
-  ///
-  /// Retrieves a boolean value indicating whether all tables are selectable.
-  ///
-  /// Returns:
-  /// - false: if not all tables are selectable or if none are;
-  /// - true: if all tables are selectable.
-  case sqlAllTablesAreSelectable // = 506
-
-  ///
-  /// Retrieves the null ordering.
-  ///
-  /// Returns a int32 ordinal for the null ordering being used, as described in
-  /// `arrow.flight.protocol.sql.SqlNullOrdering`.
-  case sqlNullOrdering // = 507
-
-  /// Retrieves a UTF-8 string list with values of the supported keywords.
-  case sqlKeywords // = 508
-
-  /// Retrieves a UTF-8 string list with values of the supported numeric functions.
-  case sqlNumericFunctions // = 509
-
-  /// Retrieves a UTF-8 string list with values of the supported string functions.
-  case sqlStringFunctions // = 510
-
-  /// Retrieves a UTF-8 string list with values of the supported system functions.
-  case sqlSystemFunctions // = 511
-
-  /// Retrieves a UTF-8 string list with values of the supported datetime functions.
-  case sqlDatetimeFunctions // = 512
-
-  ///
-  /// Retrieves the UTF-8 string that can be used to escape wildcard characters.
-  /// This is the string that can be used to escape '_' or '%' in the catalog search parameters that are a pattern
-  /// (and therefore use one of the wildcard characters).
-  /// The '_' character represents any single character; the '%' character represents any sequence of zero or more
-  /// characters.
-  case sqlSearchStringEscape // = 513
-
-  ///
-  /// Retrieves a UTF-8 string with all the "extra" characters that can be used in unquoted identifier names
-  /// (those beyond a-z, A-Z, 0-9 and _).
-  case sqlExtraNameCharacters // = 514
-
-  ///
-  /// Retrieves a boolean value indicating whether column aliasing is supported.
-  /// If so, the SQL AS clause can be used to provide names for computed columns or to provide alias names for columns
-  /// as required.
-  ///
-  /// Returns:
-  /// - false: if column aliasing is unsupported;
-  /// - true: if column aliasing is supported.
-  case sqlSupportsColumnAliasing // = 515
-
-  ///
-  /// Retrieves a boolean value indicating whether concatenations between null and non-null values being
-  /// null are supported.
-  ///
-  /// - Returns:
-  /// - false: if concatenations between null and non-null values being null are unsupported;
-  /// - true: if concatenations between null and non-null values being null are supported.
-  case sqlNullPlusNullIsNull // = 516
-
-  ///
-  /// Retrieves a map where the key is the type to convert from and the value is a list with the types to convert to,
-  /// indicating the supported conversions. Each key and each item on the list value is a value to a predefined type on
-  /// SqlSupportsConvert enum.
-  /// The returned map will be: map<int32, list<int32>>
-  case sqlSupportsConvert // = 517
-
-  ///
-  /// Retrieves a boolean value indicating whether, when table correlation names are supported,
-  /// they are restricted to being different from the names of the tables.
-  ///
-  /// Returns:
-  /// - false: if table correlation names are unsupported;
-  /// - true: if table correlation names are supported.
-  case sqlSupportsTableCorrelationNames // = 518
-
-  ///
-  /// Retrieves a boolean value indicating whether, when table correlation names are supported,
-  /// they are restricted to being different from the names of the tables.
-  ///
-  /// Returns:
-  /// - false: if different table correlation names are unsupported;
-  /// - true: if different table correlation names are supported
-  case sqlSupportsDifferentTableCorrelationNames // = 519
-
-  ///
-  /// Retrieves a boolean value indicating whether expressions in ORDER BY lists are supported.
-  ///
-  /// Returns:
-  /// - false: if expressions in ORDER BY are unsupported;
-  /// - true: if expressions in ORDER BY are supported;
-  case sqlSupportsExpressionsInOrderBy // = 520
-
-  ///
-  /// Retrieves a boolean value indicating whether using a column that is not in the SELECT statement in a GROUP BY
-  /// clause is supported.
-  ///
-  /// Returns:
-  /// - false: if using a column that is not in the SELECT statement in a GROUP BY clause is unsupported;
-  /// - true: if using a column that is not in the SELECT statement in a GROUP BY clause is supported.
-  case sqlSupportsOrderByUnrelated // = 521
-
-  ///
-  /// Retrieves the supported GROUP BY commands;
-  ///
-  /// Returns an int32 bitmask value representing the supported commands.
-  /// The returned bitmask should be parsed in order to retrieve the supported commands.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (GROUP BY is unsupported);
-  /// - return 1 (\b1)   => [SQL_GROUP_BY_UNRELATED];
-  /// - return 2 (\b10)  => [SQL_GROUP_BY_BEYOND_SELECT];
-  /// - return 3 (\b11)  => [SQL_GROUP_BY_UNRELATED, SQL_GROUP_BY_BEYOND_SELECT].
-  /// Valid GROUP BY types are described under `arrow.flight.protocol.sql.SqlSupportedGroupBy`.
-  case sqlSupportedGroupBy // = 522
-
-  ///
-  /// Retrieves a boolean value indicating whether specifying a LIKE escape clause is supported.
-  ///
-  /// Returns:
-  /// - false: if specifying a LIKE escape clause is unsupported;
-  /// - true: if specifying a LIKE escape clause is supported.
-  case sqlSupportsLikeEscapeClause // = 523
-
-  ///
-  /// Retrieves a boolean value indicating whether columns may be defined as non-nullable.
-  ///
-  /// Returns:
-  /// - false: if columns cannot be defined as non-nullable;
-  /// - true: if columns may be defined as non-nullable.
-  case sqlSupportsNonNullableColumns // = 524
-
-  ///
-  /// Retrieves the supported SQL grammar level as per the ODBC specification.
-  ///
-  /// Returns an int32 bitmask value representing the supported SQL grammar level.
-  /// The returned bitmask should be parsed in order to retrieve the supported grammar levels.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (SQL grammar is unsupported);
-  /// - return 1 (\b1)   => [SQL_MINIMUM_GRAMMAR];
-  /// - return 2 (\b10)  => [SQL_CORE_GRAMMAR];
-  /// - return 3 (\b11)  => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR];
-  /// - return 4 (\b100) => [SQL_EXTENDED_GRAMMAR];
-  /// - return 5 (\b101) => [SQL_MINIMUM_GRAMMAR, SQL_EXTENDED_GRAMMAR];
-  /// - return 6 (\b110) => [SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR];
-  /// - return 7 (\b111) => [SQL_MINIMUM_GRAMMAR, SQL_CORE_GRAMMAR, SQL_EXTENDED_GRAMMAR].
-  /// Valid SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedSqlGrammar`.
-  case sqlSupportedGrammar // = 525
-
-  ///
-  /// Retrieves the supported ANSI92 SQL grammar level.
-  ///
-  /// Returns an int32 bitmask value representing the supported ANSI92 SQL grammar level.
-  /// The returned bitmask should be parsed in order to retrieve the supported commands.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (ANSI92 SQL grammar is unsupported);
-  /// - return 1 (\b1)   => [ANSI92_ENTRY_SQL];
-  /// - return 2 (\b10)  => [ANSI92_INTERMEDIATE_SQL];
-  /// - return 3 (\b11)  => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL];
-  /// - return 4 (\b100) => [ANSI92_FULL_SQL];
-  /// - return 5 (\b101) => [ANSI92_ENTRY_SQL, ANSI92_FULL_SQL];
-  /// - return 6 (\b110) => [ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL];
-  /// - return 7 (\b111) => [ANSI92_ENTRY_SQL, ANSI92_INTERMEDIATE_SQL, ANSI92_FULL_SQL].
-  /// Valid ANSI92 SQL grammar levels are described under `arrow.flight.protocol.sql.SupportedAnsi92SqlGrammarLevel`.
-  case sqlAnsi92SupportedLevel // = 526
-
-  ///
-  /// Retrieves a boolean value indicating whether the SQL Integrity Enhancement Facility is supported.
-  ///
-  /// Returns:
-  /// - false: if the SQL Integrity Enhancement Facility is supported;
-  /// - true: if the SQL Integrity Enhancement Facility is supported.
-  case sqlSupportsIntegrityEnhancementFacility // = 527
-
-  ///
-  /// Retrieves the support level for SQL OUTER JOINs.
-  ///
-  /// Returns a int32 ordinal for the SQL ordering being used, as described in
-  /// `arrow.flight.protocol.sql.SqlOuterJoinsSupportLevel`.
-  case sqlOuterJoinsSupportLevel // = 528
-
-  /// Retrieves a UTF-8 string with the preferred term for "schema".
-  case sqlSchemaTerm // = 529
-
-  /// Retrieves a UTF-8 string with the preferred term for "procedure".
-  case sqlProcedureTerm // = 530
-
-  ///
-  /// Retrieves a UTF-8 string with the preferred term for "catalog".
-  /// If a empty string is returned its assumed that the server does NOT supports catalogs.
-  case sqlCatalogTerm // = 531
-
-  ///
-  /// Retrieves a boolean value indicating whether a catalog appears at the start of a fully qualified table name.
-  ///
-  /// - false: if a catalog does not appear at the start of a fully qualified table name;
-  /// - true: if a catalog appears at the start of a fully qualified table name.
-  case sqlCatalogAtStart // = 532
-
-  ///
-  /// Retrieves the supported actions for a SQL schema.
-  ///
-  /// Returns an int32 bitmask value representing the supported actions for a SQL schema.
-  /// The returned bitmask should be parsed in order to retrieve the supported actions for a SQL schema.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (no supported actions for SQL schema);
-  /// - return 1 (\b1)   => [SQL_ELEMENT_IN_PROCEDURE_CALLS];
-  /// - return 2 (\b10)  => [SQL_ELEMENT_IN_INDEX_DEFINITIONS];
-  /// - return 3 (\b11)  => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS];
-  /// - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
-  /// - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
-  /// - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
-  /// - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS].
-  /// Valid actions for a SQL schema described under `arrow.flight.protocol.sql.SqlSupportedElementActions`.
-  case sqlSchemasSupportedActions // = 533
-
-  ///
-  /// Retrieves the supported actions for a SQL schema.
-  ///
-  /// Returns an int32 bitmask value representing the supported actions for a SQL catalog.
-  /// The returned bitmask should be parsed in order to retrieve the supported actions for a SQL catalog.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (no supported actions for SQL catalog);
-  /// - return 1 (\b1)   => [SQL_ELEMENT_IN_PROCEDURE_CALLS];
-  /// - return 2 (\b10)  => [SQL_ELEMENT_IN_INDEX_DEFINITIONS];
-  /// - return 3 (\b11)  => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS];
-  /// - return 4 (\b100) => [SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
-  /// - return 5 (\b101) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
-  /// - return 6 (\b110) => [SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS];
-  /// - return 7 (\b111) => [SQL_ELEMENT_IN_PROCEDURE_CALLS, SQL_ELEMENT_IN_INDEX_DEFINITIONS, SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS].
-  /// Valid actions for a SQL catalog are described under `arrow.flight.protocol.sql.SqlSupportedElementActions`.
-  case sqlCatalogsSupportedActions // = 534
-
-  ///
-  /// Retrieves the supported SQL positioned commands.
-  ///
-  /// Returns an int32 bitmask value representing the supported SQL positioned commands.
-  /// The returned bitmask should be parsed in order to retrieve the supported SQL positioned commands.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)  => [] (no supported SQL positioned commands);
-  /// - return 1 (\b1)  => [SQL_POSITIONED_DELETE];
-  /// - return 2 (\b10) => [SQL_POSITIONED_UPDATE];
-  /// - return 3 (\b11) => [SQL_POSITIONED_DELETE, SQL_POSITIONED_UPDATE].
-  /// Valid SQL positioned commands are described under `arrow.flight.protocol.sql.SqlSupportedPositionedCommands`.
-  case sqlSupportedPositionedCommands // = 535
-
-  ///
-  /// Retrieves a boolean value indicating whether SELECT FOR UPDATE statements are supported.
-  ///
-  /// Returns:
-  /// - false: if SELECT FOR UPDATE statements are unsupported;
-  /// - true: if SELECT FOR UPDATE statements are supported.
-  case sqlSelectForUpdateSupported // = 536
-
-  ///
-  /// Retrieves a boolean value indicating whether stored procedure calls that use the stored procedure escape syntax
-  /// are supported.
-  ///
-  /// Returns:
-  /// - false: if stored procedure calls that use the stored procedure escape syntax are unsupported;
-  /// - true: if stored procedure calls that use the stored procedure escape syntax are supported.
-  case sqlStoredProceduresSupported // = 537
-
-  ///
-  /// Retrieves the supported SQL subqueries.
-  ///
-  /// Returns an int32 bitmask value representing the supported SQL subqueries.
-  /// The returned bitmask should be parsed in order to retrieve the supported SQL subqueries.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)     => [] (no supported SQL subqueries);
-  /// - return 1 (\b1)     => [SQL_SUBQUERIES_IN_COMPARISONS];
-  /// - return 2 (\b10)    => [SQL_SUBQUERIES_IN_EXISTS];
-  /// - return 3 (\b11)    => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS];
-  /// - return 4 (\b100)   => [SQL_SUBQUERIES_IN_INS];
-  /// - return 5 (\b101)   => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS];
-  /// - return 6 (\b110)   => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_EXISTS];
-  /// - return 7 (\b111)   => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS];
-  /// - return 8 (\b1000)  => [SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - return 9 (\b1001)  => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - return 10 (\b1010) => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - return 11 (\b1011) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - return 12 (\b1100) => [SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - return 13 (\b1101) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - return 14 (\b1110) => [SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - return 15 (\b1111) => [SQL_SUBQUERIES_IN_COMPARISONS, SQL_SUBQUERIES_IN_EXISTS, SQL_SUBQUERIES_IN_INS, SQL_SUBQUERIES_IN_QUANTIFIEDS];
-  /// - ...
-  /// Valid SQL subqueries are described under `arrow.flight.protocol.sql.SqlSupportedSubqueries`.
-  case sqlSupportedSubqueries // = 538
-
-  ///
-  /// Retrieves a boolean value indicating whether correlated subqueries are supported.
-  ///
-  /// Returns:
-  /// - false: if correlated subqueries are unsupported;
-  /// - true: if correlated subqueries are supported.
-  case sqlCorrelatedSubqueriesSupported // = 539
-
-  ///
-  /// Retrieves the supported SQL UNIONs.
-  ///
-  /// Returns an int32 bitmask value representing the supported SQL UNIONs.
-  /// The returned bitmask should be parsed in order to retrieve the supported SQL UNIONs.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)  => [] (no supported SQL positioned commands);
-  /// - return 1 (\b1)  => [SQL_UNION];
-  /// - return 2 (\b10) => [SQL_UNION_ALL];
-  /// - return 3 (\b11) => [SQL_UNION, SQL_UNION_ALL].
-  /// Valid SQL positioned commands are described under `arrow.flight.protocol.sql.SqlSupportedUnions`.
-  case sqlSupportedUnions // = 540
-
-  /// Retrieves a int64 value representing the maximum number of hex characters allowed in an inline binary literal.
-  case sqlMaxBinaryLiteralLength // = 541
-
-  /// Retrieves a int64 value representing the maximum number of characters allowed for a character literal.
-  case sqlMaxCharLiteralLength // = 542
-
-  /// Retrieves a int64 value representing the maximum number of characters allowed for a column name.
-  case sqlMaxColumnNameLength // = 543
-
-  /// Retrieves a int64 value representing the maximum number of columns allowed in a GROUP BY clause.
-  case sqlMaxColumnsInGroupBy // = 544
-
-  /// Retrieves a int64 value representing the maximum number of columns allowed in an index.
-  case sqlMaxColumnsInIndex // = 545
-
-  /// Retrieves a int64 value representing the maximum number of columns allowed in an ORDER BY clause.
-  case sqlMaxColumnsInOrderBy // = 546
-
-  /// Retrieves a int64 value representing the maximum number of columns allowed in a SELECT list.
-  case sqlMaxColumnsInSelect // = 547
-
-  /// Retrieves a int64 value representing the maximum number of columns allowed in a table.
-  case sqlMaxColumnsInTable // = 548
-
-  /// Retrieves a int64 value representing the maximum number of concurrent connections possible.
-  case sqlMaxConnections // = 549
-
-  /// Retrieves a int64 value the maximum number of characters allowed in a cursor name.
-  case sqlMaxCursorNameLength // = 550
-
-  ///
-  /// Retrieves a int64 value representing the maximum number of bytes allowed for an index,
-  /// including all of the parts of the index.
-  case sqlMaxIndexLength // = 551
-
-  /// Retrieves a int64 value representing the maximum number of characters allowed in a schema name.
-  case sqlDbSchemaNameLength // = 552
-
-  /// Retrieves a int64 value representing the maximum number of characters allowed in a procedure name.
-  case sqlMaxProcedureNameLength // = 553
-
-  /// Retrieves a int64 value representing the maximum number of characters allowed in a catalog name.
-  case sqlMaxCatalogNameLength // = 554
-
-  /// Retrieves a int64 value representing the maximum number of bytes allowed in a single row.
-  case sqlMaxRowSize // = 555
-
-  ///
-  /// Retrieves a boolean indicating whether the return value for the JDBC method getMaxRowSize includes the SQL
-  /// data types LONGVARCHAR and LONGVARBINARY.
-  ///
-  /// Returns:
-  /// - false: if return value for the JDBC method getMaxRowSize does
-  ///          not include the SQL data types LONGVARCHAR and LONGVARBINARY;
-  /// - true: if return value for the JDBC method getMaxRowSize includes
-  ///         the SQL data types LONGVARCHAR and LONGVARBINARY.
-  case sqlMaxRowSizeIncludesBlobs // = 556
-
-  ///
-  /// Retrieves a int64 value representing the maximum number of characters allowed for an SQL statement;
-  /// a result of 0 (zero) means that there is no limit or the limit is not known.
-  case sqlMaxStatementLength // = 557
-
-  /// Retrieves a int64 value representing the maximum number of active statements that can be open at the same time.
-  case sqlMaxStatements // = 558
-
-  /// Retrieves a int64 value representing the maximum number of characters allowed in a table name.
-  case sqlMaxTableNameLength // = 559
-
-  /// Retrieves a int64 value representing the maximum number of tables allowed in a SELECT statement.
-  case sqlMaxTablesInSelect // = 560
-
-  /// Retrieves a int64 value representing the maximum number of characters allowed in a user name.
-  case sqlMaxUsernameLength // = 561
-
-  ///
-  /// Retrieves this database's default transaction isolation level as described in
-  /// `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`.
-  ///
-  /// Returns a int32 ordinal for the SQL transaction isolation level.
-  case sqlDefaultTransactionIsolation // = 562
-
-  ///
-  /// Retrieves a boolean value indicating whether transactions are supported. If not, invoking the method commit is a
-  /// noop, and the isolation level is `arrow.flight.protocol.sql.SqlTransactionIsolationLevel.TRANSACTION_NONE`.
-  ///
-  /// Returns:
-  /// - false: if transactions are unsupported;
-  /// - true: if transactions are supported.
-  case sqlTransactionsSupported // = 563
-
-  ///
-  /// Retrieves the supported transactions isolation levels.
-  ///
-  /// Returns an int32 bitmask value representing the supported transactions isolation levels.
-  /// The returned bitmask should be parsed in order to retrieve the supported transactions isolation levels.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)     => [] (no supported SQL transactions isolation levels);
-  /// - return 1 (\b1)     => [SQL_TRANSACTION_NONE];
-  /// - return 2 (\b10)    => [SQL_TRANSACTION_READ_UNCOMMITTED];
-  /// - return 3 (\b11)    => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED];
-  /// - return 4 (\b100)   => [SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 5 (\b101)   => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 6 (\b110)   => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 7 (\b111)   => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 8 (\b1000)  => [SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 9 (\b1001)  => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 10 (\b1010) => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 11 (\b1011) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 12 (\b1100) => [SQL_TRANSACTION_REPEATABLE_READ, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 13 (\b1101) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_REPEATABLE_READ, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 14 (\b1110) => [SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 15 (\b1111) => [SQL_TRANSACTION_NONE, SQL_TRANSACTION_READ_UNCOMMITTED, SQL_TRANSACTION_REPEATABLE_READ, SQL_TRANSACTION_REPEATABLE_READ];
-  /// - return 16 (\b10000) => [SQL_TRANSACTION_SERIALIZABLE];
-  /// - ...
-  /// Valid SQL positioned commands are described under `arrow.flight.protocol.sql.SqlTransactionIsolationLevel`.
-  case sqlSupportedTransactionsIsolationLevels // = 564
-
-  ///
-  /// Retrieves a boolean value indicating whether a data definition statement within a transaction forces
-  /// the transaction to commit.
-  ///
-  /// Returns:
-  /// - false: if a data definition statement within a transaction does not force the transaction to commit;
-  /// - true: if a data definition statement within a transaction forces the transaction to commit.
-  case sqlDataDefinitionCausesTransactionCommit // = 565
-
-  ///
-  /// Retrieves a boolean value indicating whether a data definition statement within a transaction is ignored.
-  ///
-  /// Returns:
-  /// - false: if a data definition statement within a transaction is taken into account;
-  /// - true: a data definition statement within a transaction is ignored.
-  case sqlDataDefinitionsInTransactionsIgnored // = 566
-
-  ///
-  /// Retrieves an int32 bitmask value representing the supported result set types.
-  /// The returned bitmask should be parsed in order to retrieve the supported result set types.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)    => [] (no supported result set types);
-  /// - return 1 (\b1)    => [SQL_RESULT_SET_TYPE_UNSPECIFIED];
-  /// - return 2 (\b10)   => [SQL_RESULT_SET_TYPE_FORWARD_ONLY];
-  /// - return 3 (\b11)   => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY];
-  /// - return 4 (\b100)  => [SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
-  /// - return 5 (\b101)  => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
-  /// - return 6 (\b110)  => [SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
-  /// - return 7 (\b111)  => [SQL_RESULT_SET_TYPE_UNSPECIFIED, SQL_RESULT_SET_TYPE_FORWARD_ONLY, SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE];
-  /// - return 8 (\b1000) => [SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE];
-  /// - ...
-  /// Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetType`.
-  case sqlSupportedResultSetTypes // = 567
-
-  ///
-  /// Returns an int32 bitmask value concurrency types supported for
-  /// `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_UNSPECIFIED`.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
-  /// - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
-  /// - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
-  case sqlSupportedConcurrenciesForResultSetUnspecified // = 568
-
-  ///
-  /// Returns an int32 bitmask value concurrency types supported for
-  /// `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_FORWARD_ONLY`.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
-  /// - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
-  /// - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
-  case sqlSupportedConcurrenciesForResultSetForwardOnly // = 569
-
-  ///
-  /// Returns an int32 bitmask value concurrency types supported for
-  /// `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE`.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
-  /// - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
-  /// - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
-  case sqlSupportedConcurrenciesForResultSetScrollSensitive // = 570
-
-  ///
-  /// Returns an int32 bitmask value concurrency types supported for
-  /// `arrow.flight.protocol.sql.SqlSupportedResultSetType.SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE`.
-  ///
-  /// For instance:
-  /// - return 0 (\b0)   => [] (no supported concurrency types for this result set type)
-  /// - return 1 (\b1)   => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED]
-  /// - return 2 (\b10)  => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 3 (\b11)  => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY]
-  /// - return 4 (\b100) => [SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 5 (\b101) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 6 (\b110) => [SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// - return 7 (\b111) => [SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED, SQL_RESULT_SET_CONCURRENCY_READ_ONLY, SQL_RESULT_SET_CONCURRENCY_UPDATABLE]
-  /// Valid result set types are described under `arrow.flight.protocol.sql.SqlSupportedResultSetConcurrency`.
-  case sqlSupportedConcurrenciesForResultSetScrollInsensitive // = 571
-
-  ///
-  /// Retrieves a boolean value indicating whether this database supports batch updates.
-  ///
-  /// - false: if this database does not support batch updates;
-  /// - true: if this database supports batch updates.
-  case sqlBatchUpdatesSupported // = 572
-
-  ///
-  /// Retrieves a boolean value indicating whether this database supports savepoints.
-  ///
-  /// Returns:
-  /// - false: if this database does not support savepoints;
-  /// - true: if this database supports savepoints.
-  case sqlSavepointsSupported // = 573
-
-  ///
-  /// Retrieves a boolean value indicating whether named parameters are supported in callable statements.
-  ///
-  /// Returns:
-  /// - false: if named parameters in callable statements are unsupported;
-  /// - true: if named parameters in callable statements are supported.
-  case sqlNamedParametersSupported // = 574
-
-  ///
-  /// Retrieves a boolean value indicating whether updates made to a LOB are made on a copy or directly to the LOB.
-  ///
-  /// Returns:
-  /// - false: if updates made to a LOB are made directly to the LOB;
-  /// - true: if updates made to a LOB are made on a copy.
-  case sqlLocatorsUpdateCopy // = 575
-
-  ///
-  /// Retrieves a boolean value indicating whether invoking user-defined or vendor functions
-  /// using the stored procedure escape syntax is supported.
-  ///
-  /// Returns:
-  /// - false: if invoking user-defined or vendor functions using the stored procedure escape syntax is unsupported;
-  /// - true: if invoking user-defined or vendor functions using the stored procedure escape syntax is supported.
-  case sqlStoredFunctionsUsingCallSyntaxSupported // = 576
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .flightSqlServerName
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .flightSqlServerName
-    case 1: self = .flightSqlServerVersion
-    case 2: self = .flightSqlServerArrowVersion
-    case 3: self = .flightSqlServerReadOnly
-    case 4: self = .flightSqlServerSql
-    case 5: self = .flightSqlServerSubstrait
-    case 6: self = .flightSqlServerSubstraitMinVersion
-    case 7: self = .flightSqlServerSubstraitMaxVersion
-    case 8: self = .flightSqlServerTransaction
-    case 9: self = .flightSqlServerCancel
-    case 100: self = .flightSqlServerStatementTimeout
-    case 101: self = .flightSqlServerTransactionTimeout
-    case 500: self = .sqlDdlCatalog
-    case 501: self = .sqlDdlSchema
-    case 502: self = .sqlDdlTable
-    case 503: self = .sqlIdentifierCase
-    case 504: self = .sqlIdentifierQuoteChar
-    case 505: self = .sqlQuotedIdentifierCase
-    case 506: self = .sqlAllTablesAreSelectable
-    case 507: self = .sqlNullOrdering
-    case 508: self = .sqlKeywords
-    case 509: self = .sqlNumericFunctions
-    case 510: self = .sqlStringFunctions
-    case 511: self = .sqlSystemFunctions
-    case 512: self = .sqlDatetimeFunctions
-    case 513: self = .sqlSearchStringEscape
-    case 514: self = .sqlExtraNameCharacters
-    case 515: self = .sqlSupportsColumnAliasing
-    case 516: self = .sqlNullPlusNullIsNull
-    case 517: self = .sqlSupportsConvert
-    case 518: self = .sqlSupportsTableCorrelationNames
-    case 519: self = .sqlSupportsDifferentTableCorrelationNames
-    case 520: self = .sqlSupportsExpressionsInOrderBy
-    case 521: self = .sqlSupportsOrderByUnrelated
-    case 522: self = .sqlSupportedGroupBy
-    case 523: self = .sqlSupportsLikeEscapeClause
-    case 524: self = .sqlSupportsNonNullableColumns
-    case 525: self = .sqlSupportedGrammar
-    case 526: self = .sqlAnsi92SupportedLevel
-    case 527: self = .sqlSupportsIntegrityEnhancementFacility
-    case 528: self = .sqlOuterJoinsSupportLevel
-    case 529: self = .sqlSchemaTerm
-    case 530: self = .sqlProcedureTerm
-    case 531: self = .sqlCatalogTerm
-    case 532: self = .sqlCatalogAtStart
-    case 533: self = .sqlSchemasSupportedActions
-    case 534: self = .sqlCatalogsSupportedActions
-    case 535: self = .sqlSupportedPositionedCommands
-    case 536: self = .sqlSelectForUpdateSupported
-    case 537: self = .sqlStoredProceduresSupported
-    case 538: self = .sqlSupportedSubqueries
-    case 539: self = .sqlCorrelatedSubqueriesSupported
-    case 540: self = .sqlSupportedUnions
-    case 541: self = .sqlMaxBinaryLiteralLength
-    case 542: self = .sqlMaxCharLiteralLength
-    case 543: self = .sqlMaxColumnNameLength
-    case 544: self = .sqlMaxColumnsInGroupBy
-    case 545: self = .sqlMaxColumnsInIndex
-    case 546: self = .sqlMaxColumnsInOrderBy
-    case 547: self = .sqlMaxColumnsInSelect
-    case 548: self = .sqlMaxColumnsInTable
-    case 549: self = .sqlMaxConnections
-    case 550: self = .sqlMaxCursorNameLength
-    case 551: self = .sqlMaxIndexLength
-    case 552: self = .sqlDbSchemaNameLength
-    case 553: self = .sqlMaxProcedureNameLength
-    case 554: self = .sqlMaxCatalogNameLength
-    case 555: self = .sqlMaxRowSize
-    case 556: self = .sqlMaxRowSizeIncludesBlobs
-    case 557: self = .sqlMaxStatementLength
-    case 558: self = .sqlMaxStatements
-    case 559: self = .sqlMaxTableNameLength
-    case 560: self = .sqlMaxTablesInSelect
-    case 561: self = .sqlMaxUsernameLength
-    case 562: self = .sqlDefaultTransactionIsolation
-    case 563: self = .sqlTransactionsSupported
-    case 564: self = .sqlSupportedTransactionsIsolationLevels
-    case 565: self = .sqlDataDefinitionCausesTransactionCommit
-    case 566: self = .sqlDataDefinitionsInTransactionsIgnored
-    case 567: self = .sqlSupportedResultSetTypes
-    case 568: self = .sqlSupportedConcurrenciesForResultSetUnspecified
-    case 569: self = .sqlSupportedConcurrenciesForResultSetForwardOnly
-    case 570: self = .sqlSupportedConcurrenciesForResultSetScrollSensitive
-    case 571: self = .sqlSupportedConcurrenciesForResultSetScrollInsensitive
-    case 572: self = .sqlBatchUpdatesSupported
-    case 573: self = .sqlSavepointsSupported
-    case 574: self = .sqlNamedParametersSupported
-    case 575: self = .sqlLocatorsUpdateCopy
-    case 576: self = .sqlStoredFunctionsUsingCallSyntaxSupported
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .flightSqlServerName: return 0
-    case .flightSqlServerVersion: return 1
-    case .flightSqlServerArrowVersion: return 2
-    case .flightSqlServerReadOnly: return 3
-    case .flightSqlServerSql: return 4
-    case .flightSqlServerSubstrait: return 5
-    case .flightSqlServerSubstraitMinVersion: return 6
-    case .flightSqlServerSubstraitMaxVersion: return 7
-    case .flightSqlServerTransaction: return 8
-    case .flightSqlServerCancel: return 9
-    case .flightSqlServerStatementTimeout: return 100
-    case .flightSqlServerTransactionTimeout: return 101
-    case .sqlDdlCatalog: return 500
-    case .sqlDdlSchema: return 501
-    case .sqlDdlTable: return 502
-    case .sqlIdentifierCase: return 503
-    case .sqlIdentifierQuoteChar: return 504
-    case .sqlQuotedIdentifierCase: return 505
-    case .sqlAllTablesAreSelectable: return 506
-    case .sqlNullOrdering: return 507
-    case .sqlKeywords: return 508
-    case .sqlNumericFunctions: return 509
-    case .sqlStringFunctions: return 510
-    case .sqlSystemFunctions: return 511
-    case .sqlDatetimeFunctions: return 512
-    case .sqlSearchStringEscape: return 513
-    case .sqlExtraNameCharacters: return 514
-    case .sqlSupportsColumnAliasing: return 515
-    case .sqlNullPlusNullIsNull: return 516
-    case .sqlSupportsConvert: return 517
-    case .sqlSupportsTableCorrelationNames: return 518
-    case .sqlSupportsDifferentTableCorrelationNames: return 519
-    case .sqlSupportsExpressionsInOrderBy: return 520
-    case .sqlSupportsOrderByUnrelated: return 521
-    case .sqlSupportedGroupBy: return 522
-    case .sqlSupportsLikeEscapeClause: return 523
-    case .sqlSupportsNonNullableColumns: return 524
-    case .sqlSupportedGrammar: return 525
-    case .sqlAnsi92SupportedLevel: return 526
-    case .sqlSupportsIntegrityEnhancementFacility: return 527
-    case .sqlOuterJoinsSupportLevel: return 528
-    case .sqlSchemaTerm: return 529
-    case .sqlProcedureTerm: return 530
-    case .sqlCatalogTerm: return 531
-    case .sqlCatalogAtStart: return 532
-    case .sqlSchemasSupportedActions: return 533
-    case .sqlCatalogsSupportedActions: return 534
-    case .sqlSupportedPositionedCommands: return 535
-    case .sqlSelectForUpdateSupported: return 536
-    case .sqlStoredProceduresSupported: return 537
-    case .sqlSupportedSubqueries: return 538
-    case .sqlCorrelatedSubqueriesSupported: return 539
-    case .sqlSupportedUnions: return 540
-    case .sqlMaxBinaryLiteralLength: return 541
-    case .sqlMaxCharLiteralLength: return 542
-    case .sqlMaxColumnNameLength: return 543
-    case .sqlMaxColumnsInGroupBy: return 544
-    case .sqlMaxColumnsInIndex: return 545
-    case .sqlMaxColumnsInOrderBy: return 546
-    case .sqlMaxColumnsInSelect: return 547
-    case .sqlMaxColumnsInTable: return 548
-    case .sqlMaxConnections: return 549
-    case .sqlMaxCursorNameLength: return 550
-    case .sqlMaxIndexLength: return 551
-    case .sqlDbSchemaNameLength: return 552
-    case .sqlMaxProcedureNameLength: return 553
-    case .sqlMaxCatalogNameLength: return 554
-    case .sqlMaxRowSize: return 555
-    case .sqlMaxRowSizeIncludesBlobs: return 556
-    case .sqlMaxStatementLength: return 557
-    case .sqlMaxStatements: return 558
-    case .sqlMaxTableNameLength: return 559
-    case .sqlMaxTablesInSelect: return 560
-    case .sqlMaxUsernameLength: return 561
-    case .sqlDefaultTransactionIsolation: return 562
-    case .sqlTransactionsSupported: return 563
-    case .sqlSupportedTransactionsIsolationLevels: return 564
-    case .sqlDataDefinitionCausesTransactionCommit: return 565
-    case .sqlDataDefinitionsInTransactionsIgnored: return 566
-    case .sqlSupportedResultSetTypes: return 567
-    case .sqlSupportedConcurrenciesForResultSetUnspecified: return 568
-    case .sqlSupportedConcurrenciesForResultSetForwardOnly: return 569
-    case .sqlSupportedConcurrenciesForResultSetScrollSensitive: return 570
-    case .sqlSupportedConcurrenciesForResultSetScrollInsensitive: return 571
-    case .sqlBatchUpdatesSupported: return 572
-    case .sqlSavepointsSupported: return 573
-    case .sqlNamedParametersSupported: return 574
-    case .sqlLocatorsUpdateCopy: return 575
-    case .sqlStoredFunctionsUsingCallSyntaxSupported: return 576
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_SqlInfo: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_SqlInfo] = [
-    .flightSqlServerName,
-    .flightSqlServerVersion,
-    .flightSqlServerArrowVersion,
-    .flightSqlServerReadOnly,
-    .flightSqlServerSql,
-    .flightSqlServerSubstrait,
-    .flightSqlServerSubstraitMinVersion,
-    .flightSqlServerSubstraitMaxVersion,
-    .flightSqlServerTransaction,
-    .flightSqlServerCancel,
-    .flightSqlServerStatementTimeout,
-    .flightSqlServerTransactionTimeout,
-    .sqlDdlCatalog,
-    .sqlDdlSchema,
-    .sqlDdlTable,
-    .sqlIdentifierCase,
-    .sqlIdentifierQuoteChar,
-    .sqlQuotedIdentifierCase,
-    .sqlAllTablesAreSelectable,
-    .sqlNullOrdering,
-    .sqlKeywords,
-    .sqlNumericFunctions,
-    .sqlStringFunctions,
-    .sqlSystemFunctions,
-    .sqlDatetimeFunctions,
-    .sqlSearchStringEscape,
-    .sqlExtraNameCharacters,
-    .sqlSupportsColumnAliasing,
-    .sqlNullPlusNullIsNull,
-    .sqlSupportsConvert,
-    .sqlSupportsTableCorrelationNames,
-    .sqlSupportsDifferentTableCorrelationNames,
-    .sqlSupportsExpressionsInOrderBy,
-    .sqlSupportsOrderByUnrelated,
-    .sqlSupportedGroupBy,
-    .sqlSupportsLikeEscapeClause,
-    .sqlSupportsNonNullableColumns,
-    .sqlSupportedGrammar,
-    .sqlAnsi92SupportedLevel,
-    .sqlSupportsIntegrityEnhancementFacility,
-    .sqlOuterJoinsSupportLevel,
-    .sqlSchemaTerm,
-    .sqlProcedureTerm,
-    .sqlCatalogTerm,
-    .sqlCatalogAtStart,
-    .sqlSchemasSupportedActions,
-    .sqlCatalogsSupportedActions,
-    .sqlSupportedPositionedCommands,
-    .sqlSelectForUpdateSupported,
-    .sqlStoredProceduresSupported,
-    .sqlSupportedSubqueries,
-    .sqlCorrelatedSubqueriesSupported,
-    .sqlSupportedUnions,
-    .sqlMaxBinaryLiteralLength,
-    .sqlMaxCharLiteralLength,
-    .sqlMaxColumnNameLength,
-    .sqlMaxColumnsInGroupBy,
-    .sqlMaxColumnsInIndex,
-    .sqlMaxColumnsInOrderBy,
-    .sqlMaxColumnsInSelect,
-    .sqlMaxColumnsInTable,
-    .sqlMaxConnections,
-    .sqlMaxCursorNameLength,
-    .sqlMaxIndexLength,
-    .sqlDbSchemaNameLength,
-    .sqlMaxProcedureNameLength,
-    .sqlMaxCatalogNameLength,
-    .sqlMaxRowSize,
-    .sqlMaxRowSizeIncludesBlobs,
-    .sqlMaxStatementLength,
-    .sqlMaxStatements,
-    .sqlMaxTableNameLength,
-    .sqlMaxTablesInSelect,
-    .sqlMaxUsernameLength,
-    .sqlDefaultTransactionIsolation,
-    .sqlTransactionsSupported,
-    .sqlSupportedTransactionsIsolationLevels,
-    .sqlDataDefinitionCausesTransactionCommit,
-    .sqlDataDefinitionsInTransactionsIgnored,
-    .sqlSupportedResultSetTypes,
-    .sqlSupportedConcurrenciesForResultSetUnspecified,
-    .sqlSupportedConcurrenciesForResultSetForwardOnly,
-    .sqlSupportedConcurrenciesForResultSetScrollSensitive,
-    .sqlSupportedConcurrenciesForResultSetScrollInsensitive,
-    .sqlBatchUpdatesSupported,
-    .sqlSavepointsSupported,
-    .sqlNamedParametersSupported,
-    .sqlLocatorsUpdateCopy,
-    .sqlStoredFunctionsUsingCallSyntaxSupported,
-  ]
-}
-
-#endif // swift(>=4.2)
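Many of the SqlInfo values documented above (sqlSupportedGroupBy, sqlSupportedGrammar, the isolation-level and result-set masks) are int32 bitmasks that the client is expected to parse bit by bit. A small illustrative decoder for the GROUP BY mask; the OptionSet type is hypothetical, but the bit assignments are the ones given in the comment for sqlSupportedGroupBy:

    // SQL_GROUP_BY_UNRELATED = bit 0, SQL_GROUP_BY_BEYOND_SELECT = bit 1,
    // so a server answer of 3 (\b11) advertises both capabilities.
    struct SupportedGroupBy: OptionSet {
        let rawValue: Int32
        static let unrelated    = SupportedGroupBy(rawValue: 1 << 0)
        static let beyondSelect = SupportedGroupBy(rawValue: 1 << 1)
    }

    let mask = SupportedGroupBy(rawValue: 3)
    assert(mask.contains(.unrelated) && mask.contains(.beyondSelect))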
-
-/// The level of support for Flight SQL transaction RPCs.
-enum Arrow_Flight_Protocol_Sql_SqlSupportedTransaction: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-
-  /// Unknown/not indicated/no support
-  case none // = 0
-
-  /// Transactions, but not savepoints.
-  /// A savepoint is a mark within a transaction that can be individually
-  /// rolled back to. Not all databases support savepoints.
-  case transaction // = 1
-
-  /// Transactions and savepoints
-  case savepoint // = 2
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .none
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .none
-    case 1: self = .transaction
-    case 2: self = .savepoint
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .none: return 0
-    case .transaction: return 1
-    case .savepoint: return 2
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_SqlSupportedTransaction: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedTransaction] = [
-    .none,
-    .transaction,
-    .savepoint,
-  ]
-}
-
-#endif // swift(>=4.2)
-
-enum Arrow_Flight_Protocol_Sql_SqlSupportedCaseSensitivity: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-  case sqlCaseSensitivityUnknown // = 0
-  case sqlCaseSensitivityCaseInsensitive // = 1
-  case sqlCaseSensitivityUppercase // = 2
-  case sqlCaseSensitivityLowercase // = 3
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .sqlCaseSensitivityUnknown
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .sqlCaseSensitivityUnknown
-    case 1: self = .sqlCaseSensitivityCaseInsensitive
-    case 2: self = .sqlCaseSensitivityUppercase
-    case 3: self = .sqlCaseSensitivityLowercase
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .sqlCaseSensitivityUnknown: return 0
-    case .sqlCaseSensitivityCaseInsensitive: return 1
-    case .sqlCaseSensitivityUppercase: return 2
-    case .sqlCaseSensitivityLowercase: return 3
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_SqlSupportedCaseSensitivity: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedCaseSensitivity] = [
-    .sqlCaseSensitivityUnknown,
-    .sqlCaseSensitivityCaseInsensitive,
-    .sqlCaseSensitivityUppercase,
-    .sqlCaseSensitivityLowercase,
-  ]
-}
-
-#endif // swift(>=4.2)
-
-enum Arrow_Flight_Protocol_Sql_SqlNullOrdering: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-  case sqlNullsSortedHigh // = 0
-  case sqlNullsSortedLow // = 1
-  case sqlNullsSortedAtStart // = 2
-  case sqlNullsSortedAtEnd // = 3
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .sqlNullsSortedHigh
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .sqlNullsSortedHigh
-    case 1: self = .sqlNullsSortedLow
-    case 2: self = .sqlNullsSortedAtStart
-    case 3: self = .sqlNullsSortedAtEnd
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .sqlNullsSortedHigh: return 0
-    case .sqlNullsSortedLow: return 1
-    case .sqlNullsSortedAtStart: return 2
-    case .sqlNullsSortedAtEnd: return 3
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_SqlNullOrdering: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_SqlNullOrdering] = [
-    .sqlNullsSortedHigh,
-    .sqlNullsSortedLow,
-    .sqlNullsSortedAtStart,
-    .sqlNullsSortedAtEnd,
-  ]
-}
-
-#endif // swift(>=4.2)
-
-enum Arrow_Flight_Protocol_Sql_SupportedSqlGrammar: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-  case sqlMinimumGrammar // = 0
-  case sqlCoreGrammar // = 1
-  case sqlExtendedGrammar // = 2
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .sqlMinimumGrammar
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .sqlMinimumGrammar
-    case 1: self = .sqlCoreGrammar
-    case 2: self = .sqlExtendedGrammar
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .sqlMinimumGrammar: return 0
-    case .sqlCoreGrammar: return 1
-    case .sqlExtendedGrammar: return 2
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_SupportedSqlGrammar: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_SupportedSqlGrammar] = [
-    .sqlMinimumGrammar,
-    .sqlCoreGrammar,
-    .sqlExtendedGrammar,
-  ]
-}
-
-#endif // swift(>=4.2)
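All of these generated enums follow swift-protobuf's open-enum pattern: the failable initializer never actually fails, because unknown wire values are preserved in the UNRECOGNIZED case rather than dropped. A short illustration using SqlNullOrdering as defined above (the types are module-internal, so this only compiles inside ArrowFlight):

    let known = Arrow_Flight_Protocol_Sql_SqlNullOrdering(rawValue: 2)
    // known == .sqlNullsSortedAtStart

    let future = Arrow_Flight_Protocol_Sql_SqlNullOrdering(rawValue: 99)
    // future == .UNRECOGNIZED(99); the raw value survives the round trip:
    assert(future?.rawValue == 99)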
- static var allCases: [Arrow_Flight_Protocol_Sql_SupportedAnsi92SqlGrammarLevel] = [ - .ansi92EntrySql, - .ansi92IntermediateSql, - .ansi92FullSql, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlOuterJoinsSupportLevel: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlJoinsUnsupported // = 0 - case sqlLimitedOuterJoins // = 1 - case sqlFullOuterJoins // = 2 - case UNRECOGNIZED(Int) - - init() { - self = .sqlJoinsUnsupported - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlJoinsUnsupported - case 1: self = .sqlLimitedOuterJoins - case 2: self = .sqlFullOuterJoins - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlJoinsUnsupported: return 0 - case .sqlLimitedOuterJoins: return 1 - case .sqlFullOuterJoins: return 2 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlOuterJoinsSupportLevel: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_SqlOuterJoinsSupportLevel] = [ - .sqlJoinsUnsupported, - .sqlLimitedOuterJoins, - .sqlFullOuterJoins, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedGroupBy: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlGroupByUnrelated // = 0 - case sqlGroupByBeyondSelect // = 1 - case UNRECOGNIZED(Int) - - init() { - self = .sqlGroupByUnrelated - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlGroupByUnrelated - case 1: self = .sqlGroupByBeyondSelect - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlGroupByUnrelated: return 0 - case .sqlGroupByBeyondSelect: return 1 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedGroupBy: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedGroupBy] = [ - .sqlGroupByUnrelated, - .sqlGroupByBeyondSelect, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedElementActions: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlElementInProcedureCalls // = 0 - case sqlElementInIndexDefinitions // = 1 - case sqlElementInPrivilegeDefinitions // = 2 - case UNRECOGNIZED(Int) - - init() { - self = .sqlElementInProcedureCalls - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlElementInProcedureCalls - case 1: self = .sqlElementInIndexDefinitions - case 2: self = .sqlElementInPrivilegeDefinitions - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlElementInProcedureCalls: return 0 - case .sqlElementInIndexDefinitions: return 1 - case .sqlElementInPrivilegeDefinitions: return 2 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedElementActions: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. 
- static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedElementActions] = [ - .sqlElementInProcedureCalls, - .sqlElementInIndexDefinitions, - .sqlElementInPrivilegeDefinitions, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedPositionedCommands: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlPositionedDelete // = 0 - case sqlPositionedUpdate // = 1 - case UNRECOGNIZED(Int) - - init() { - self = .sqlPositionedDelete - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlPositionedDelete - case 1: self = .sqlPositionedUpdate - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlPositionedDelete: return 0 - case .sqlPositionedUpdate: return 1 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedPositionedCommands: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedPositionedCommands] = [ - .sqlPositionedDelete, - .sqlPositionedUpdate, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedSubqueries: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlSubqueriesInComparisons // = 0 - case sqlSubqueriesInExists // = 1 - case sqlSubqueriesInIns // = 2 - case sqlSubqueriesInQuantifieds // = 3 - case UNRECOGNIZED(Int) - - init() { - self = .sqlSubqueriesInComparisons - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlSubqueriesInComparisons - case 1: self = .sqlSubqueriesInExists - case 2: self = .sqlSubqueriesInIns - case 3: self = .sqlSubqueriesInQuantifieds - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlSubqueriesInComparisons: return 0 - case .sqlSubqueriesInExists: return 1 - case .sqlSubqueriesInIns: return 2 - case .sqlSubqueriesInQuantifieds: return 3 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedSubqueries: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedSubqueries] = [ - .sqlSubqueriesInComparisons, - .sqlSubqueriesInExists, - .sqlSubqueriesInIns, - .sqlSubqueriesInQuantifieds, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedUnions: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlUnion // = 0 - case sqlUnionAll // = 1 - case UNRECOGNIZED(Int) - - init() { - self = .sqlUnion - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlUnion - case 1: self = .sqlUnionAll - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlUnion: return 0 - case .sqlUnionAll: return 1 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedUnions: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. 
- static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedUnions] = [ - .sqlUnion, - .sqlUnionAll, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlTransactionIsolationLevel: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlTransactionNone // = 0 - case sqlTransactionReadUncommitted // = 1 - case sqlTransactionReadCommitted // = 2 - case sqlTransactionRepeatableRead // = 3 - case sqlTransactionSerializable // = 4 - case UNRECOGNIZED(Int) - - init() { - self = .sqlTransactionNone - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlTransactionNone - case 1: self = .sqlTransactionReadUncommitted - case 2: self = .sqlTransactionReadCommitted - case 3: self = .sqlTransactionRepeatableRead - case 4: self = .sqlTransactionSerializable - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlTransactionNone: return 0 - case .sqlTransactionReadUncommitted: return 1 - case .sqlTransactionReadCommitted: return 2 - case .sqlTransactionRepeatableRead: return 3 - case .sqlTransactionSerializable: return 4 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlTransactionIsolationLevel: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_SqlTransactionIsolationLevel] = [ - .sqlTransactionNone, - .sqlTransactionReadUncommitted, - .sqlTransactionReadCommitted, - .sqlTransactionRepeatableRead, - .sqlTransactionSerializable, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedTransactions: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlTransactionUnspecified // = 0 - case sqlDataDefinitionTransactions // = 1 - case sqlDataManipulationTransactions // = 2 - case UNRECOGNIZED(Int) - - init() { - self = .sqlTransactionUnspecified - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlTransactionUnspecified - case 1: self = .sqlDataDefinitionTransactions - case 2: self = .sqlDataManipulationTransactions - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlTransactionUnspecified: return 0 - case .sqlDataDefinitionTransactions: return 1 - case .sqlDataManipulationTransactions: return 2 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedTransactions: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. 
- static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedTransactions] = [ - .sqlTransactionUnspecified, - .sqlDataDefinitionTransactions, - .sqlDataManipulationTransactions, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedResultSetType: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlResultSetTypeUnspecified // = 0 - case sqlResultSetTypeForwardOnly // = 1 - case sqlResultSetTypeScrollInsensitive // = 2 - case sqlResultSetTypeScrollSensitive // = 3 - case UNRECOGNIZED(Int) - - init() { - self = .sqlResultSetTypeUnspecified - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlResultSetTypeUnspecified - case 1: self = .sqlResultSetTypeForwardOnly - case 2: self = .sqlResultSetTypeScrollInsensitive - case 3: self = .sqlResultSetTypeScrollSensitive - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlResultSetTypeUnspecified: return 0 - case .sqlResultSetTypeForwardOnly: return 1 - case .sqlResultSetTypeScrollInsensitive: return 2 - case .sqlResultSetTypeScrollSensitive: return 3 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedResultSetType: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedResultSetType] = [ - .sqlResultSetTypeUnspecified, - .sqlResultSetTypeForwardOnly, - .sqlResultSetTypeScrollInsensitive, - .sqlResultSetTypeScrollSensitive, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportedResultSetConcurrency: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlResultSetConcurrencyUnspecified // = 0 - case sqlResultSetConcurrencyReadOnly // = 1 - case sqlResultSetConcurrencyUpdatable // = 2 - case UNRECOGNIZED(Int) - - init() { - self = .sqlResultSetConcurrencyUnspecified - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlResultSetConcurrencyUnspecified - case 1: self = .sqlResultSetConcurrencyReadOnly - case 2: self = .sqlResultSetConcurrencyUpdatable - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlResultSetConcurrencyUnspecified: return 0 - case .sqlResultSetConcurrencyReadOnly: return 1 - case .sqlResultSetConcurrencyUpdatable: return 2 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportedResultSetConcurrency: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. 
- static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportedResultSetConcurrency] = [ - .sqlResultSetConcurrencyUnspecified, - .sqlResultSetConcurrencyReadOnly, - .sqlResultSetConcurrencyUpdatable, - ] -} - -#endif // swift(>=4.2) - -enum Arrow_Flight_Protocol_Sql_SqlSupportsConvert: SwiftProtobuf.Enum { - typealias RawValue = Int - case sqlConvertBigint // = 0 - case sqlConvertBinary // = 1 - case sqlConvertBit // = 2 - case sqlConvertChar // = 3 - case sqlConvertDate // = 4 - case sqlConvertDecimal // = 5 - case sqlConvertFloat // = 6 - case sqlConvertInteger // = 7 - case sqlConvertIntervalDayTime // = 8 - case sqlConvertIntervalYearMonth // = 9 - case sqlConvertLongvarbinary // = 10 - case sqlConvertLongvarchar // = 11 - case sqlConvertNumeric // = 12 - case sqlConvertReal // = 13 - case sqlConvertSmallint // = 14 - case sqlConvertTime // = 15 - case sqlConvertTimestamp // = 16 - case sqlConvertTinyint // = 17 - case sqlConvertVarbinary // = 18 - case sqlConvertVarchar // = 19 - case UNRECOGNIZED(Int) - - init() { - self = .sqlConvertBigint - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .sqlConvertBigint - case 1: self = .sqlConvertBinary - case 2: self = .sqlConvertBit - case 3: self = .sqlConvertChar - case 4: self = .sqlConvertDate - case 5: self = .sqlConvertDecimal - case 6: self = .sqlConvertFloat - case 7: self = .sqlConvertInteger - case 8: self = .sqlConvertIntervalDayTime - case 9: self = .sqlConvertIntervalYearMonth - case 10: self = .sqlConvertLongvarbinary - case 11: self = .sqlConvertLongvarchar - case 12: self = .sqlConvertNumeric - case 13: self = .sqlConvertReal - case 14: self = .sqlConvertSmallint - case 15: self = .sqlConvertTime - case 16: self = .sqlConvertTimestamp - case 17: self = .sqlConvertTinyint - case 18: self = .sqlConvertVarbinary - case 19: self = .sqlConvertVarchar - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .sqlConvertBigint: return 0 - case .sqlConvertBinary: return 1 - case .sqlConvertBit: return 2 - case .sqlConvertChar: return 3 - case .sqlConvertDate: return 4 - case .sqlConvertDecimal: return 5 - case .sqlConvertFloat: return 6 - case .sqlConvertInteger: return 7 - case .sqlConvertIntervalDayTime: return 8 - case .sqlConvertIntervalYearMonth: return 9 - case .sqlConvertLongvarbinary: return 10 - case .sqlConvertLongvarchar: return 11 - case .sqlConvertNumeric: return 12 - case .sqlConvertReal: return 13 - case .sqlConvertSmallint: return 14 - case .sqlConvertTime: return 15 - case .sqlConvertTimestamp: return 16 - case .sqlConvertTinyint: return 17 - case .sqlConvertVarbinary: return 18 - case .sqlConvertVarchar: return 19 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_SqlSupportsConvert: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_SqlSupportsConvert] = [ - .sqlConvertBigint, - .sqlConvertBinary, - .sqlConvertBit, - .sqlConvertChar, - .sqlConvertDate, - .sqlConvertDecimal, - .sqlConvertFloat, - .sqlConvertInteger, - .sqlConvertIntervalDayTime, - .sqlConvertIntervalYearMonth, - .sqlConvertLongvarbinary, - .sqlConvertLongvarchar, - .sqlConvertNumeric, - .sqlConvertReal, - .sqlConvertSmallint, - .sqlConvertTime, - .sqlConvertTimestamp, - .sqlConvertTinyint, - .sqlConvertVarbinary, - .sqlConvertVarchar, - ] -} - -#endif // swift(>=4.2) - -///* -/// The JDBC/ODBC-defined type of any object. 
-/// All the values here are the same as in the JDBC and ODBC specs. -enum Arrow_Flight_Protocol_Sql_XdbcDataType: SwiftProtobuf.Enum { - typealias RawValue = Int - case xdbcUnknownType // = 0 - case xdbcChar // = 1 - case xdbcNumeric // = 2 - case xdbcDecimal // = 3 - case xdbcInteger // = 4 - case xdbcSmallint // = 5 - case xdbcFloat // = 6 - case xdbcReal // = 7 - case xdbcDouble // = 8 - case xdbcDatetime // = 9 - case xdbcInterval // = 10 - case xdbcVarchar // = 12 - case xdbcDate // = 91 - case xdbcTime // = 92 - case xdbcTimestamp // = 93 - case xdbcLongvarchar // = -1 - case xdbcBinary // = -2 - case xdbcVarbinary // = -3 - case xdbcLongvarbinary // = -4 - case xdbcBigint // = -5 - case xdbcTinyint // = -6 - case xdbcBit // = -7 - case xdbcWchar // = -8 - case xdbcWvarchar // = -9 - case UNRECOGNIZED(Int) - - init() { - self = .xdbcUnknownType - } - - init?(rawValue: Int) { - switch rawValue { - case -9: self = .xdbcWvarchar - case -8: self = .xdbcWchar - case -7: self = .xdbcBit - case -6: self = .xdbcTinyint - case -5: self = .xdbcBigint - case -4: self = .xdbcLongvarbinary - case -3: self = .xdbcVarbinary - case -2: self = .xdbcBinary - case -1: self = .xdbcLongvarchar - case 0: self = .xdbcUnknownType - case 1: self = .xdbcChar - case 2: self = .xdbcNumeric - case 3: self = .xdbcDecimal - case 4: self = .xdbcInteger - case 5: self = .xdbcSmallint - case 6: self = .xdbcFloat - case 7: self = .xdbcReal - case 8: self = .xdbcDouble - case 9: self = .xdbcDatetime - case 10: self = .xdbcInterval - case 12: self = .xdbcVarchar - case 91: self = .xdbcDate - case 92: self = .xdbcTime - case 93: self = .xdbcTimestamp - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .xdbcWvarchar: return -9 - case .xdbcWchar: return -8 - case .xdbcBit: return -7 - case .xdbcTinyint: return -6 - case .xdbcBigint: return -5 - case .xdbcLongvarbinary: return -4 - case .xdbcVarbinary: return -3 - case .xdbcBinary: return -2 - case .xdbcLongvarchar: return -1 - case .xdbcUnknownType: return 0 - case .xdbcChar: return 1 - case .xdbcNumeric: return 2 - case .xdbcDecimal: return 3 - case .xdbcInteger: return 4 - case .xdbcSmallint: return 5 - case .xdbcFloat: return 6 - case .xdbcReal: return 7 - case .xdbcDouble: return 8 - case .xdbcDatetime: return 9 - case .xdbcInterval: return 10 - case .xdbcVarchar: return 12 - case .xdbcDate: return 91 - case .xdbcTime: return 92 - case .xdbcTimestamp: return 93 - case .UNRECOGNIZED(let i): return i - } - } - -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_XdbcDataType: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_XdbcDataType] = [ - .xdbcUnknownType, - .xdbcChar, - .xdbcNumeric, - .xdbcDecimal, - .xdbcInteger, - .xdbcSmallint, - .xdbcFloat, - .xdbcReal, - .xdbcDouble, - .xdbcDatetime, - .xdbcInterval, - .xdbcVarchar, - .xdbcDate, - .xdbcTime, - .xdbcTimestamp, - .xdbcLongvarchar, - .xdbcBinary, - .xdbcVarbinary, - .xdbcLongvarbinary, - .xdbcBigint, - .xdbcTinyint, - .xdbcBit, - .xdbcWchar, - .xdbcWvarchar, - ] -} - -#endif // swift(>=4.2) - -///* -/// Detailed subtype information for XDBC_TYPE_DATETIME and XDBC_TYPE_INTERVAL. 
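// ---------------------------------------------------------------------------
// [Editor's aside - illustrative sketch, not part of this patch. The
// XdbcDatetimeSubcode enum documented by the comment above continues right
// after this block.] The XdbcDataType raw values above follow the ODBC
// convention of negative codes for some types, and `init?(rawValue:)` never
// actually returns nil - unknown codes become .UNRECOGNIZED - so decoding a
// wire value is a total operation. Assumes the generated module compiles.

let bigint = Arrow_Flight_Protocol_Sql_XdbcDataType(rawValue: -5)
assert(bigint == .xdbcBigint)  // ODBC's SQL_BIGINT is -5

let unknown = Arrow_Flight_Protocol_Sql_XdbcDataType(rawValue: 999)
if case .UNRECOGNIZED(let code)? = unknown {
  print("server sent an unrecognized XDBC type code: \(code)")
}
// ---------------------------------------------------------------------------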
-enum Arrow_Flight_Protocol_Sql_XdbcDatetimeSubcode: SwiftProtobuf.Enum { - typealias RawValue = Int - case xdbcSubcodeUnknown // = 0 - case xdbcSubcodeYear // = 1 - static let xdbcSubcodeDate = xdbcSubcodeYear - case xdbcSubcodeTime // = 2 - static let xdbcSubcodeMonth = xdbcSubcodeTime - case xdbcSubcodeTimestamp // = 3 - static let xdbcSubcodeDay = xdbcSubcodeTimestamp - case xdbcSubcodeTimeWithTimezone // = 4 - static let xdbcSubcodeHour = xdbcSubcodeTimeWithTimezone - case xdbcSubcodeTimestampWithTimezone // = 5 - static let xdbcSubcodeMinute = xdbcSubcodeTimestampWithTimezone - case xdbcSubcodeSecond // = 6 - case xdbcSubcodeYearToMonth // = 7 - case xdbcSubcodeDayToHour // = 8 - case xdbcSubcodeDayToMinute // = 9 - case xdbcSubcodeDayToSecond // = 10 - case xdbcSubcodeHourToMinute // = 11 - case xdbcSubcodeHourToSecond // = 12 - case xdbcSubcodeMinuteToSecond // = 13 - case xdbcSubcodeIntervalYear // = 101 - case xdbcSubcodeIntervalMonth // = 102 - case xdbcSubcodeIntervalDay // = 103 - case xdbcSubcodeIntervalHour // = 104 - case xdbcSubcodeIntervalMinute // = 105 - case xdbcSubcodeIntervalSecond // = 106 - case xdbcSubcodeIntervalYearToMonth // = 107 - case xdbcSubcodeIntervalDayToHour // = 108 - case xdbcSubcodeIntervalDayToMinute // = 109 - case xdbcSubcodeIntervalDayToSecond // = 110 - case xdbcSubcodeIntervalHourToMinute // = 111 - case xdbcSubcodeIntervalHourToSecond // = 112 - case xdbcSubcodeIntervalMinuteToSecond // = 113 - case UNRECOGNIZED(Int) - - init() { - self = .xdbcSubcodeUnknown - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .xdbcSubcodeUnknown - case 1: self = .xdbcSubcodeYear - case 2: self = .xdbcSubcodeTime - case 3: self = .xdbcSubcodeTimestamp - case 4: self = .xdbcSubcodeTimeWithTimezone - case 5: self = .xdbcSubcodeTimestampWithTimezone - case 6: self = .xdbcSubcodeSecond - case 7: self = .xdbcSubcodeYearToMonth - case 8: self = .xdbcSubcodeDayToHour - case 9: self = .xdbcSubcodeDayToMinute - case 10: self = .xdbcSubcodeDayToSecond - case 11: self = .xdbcSubcodeHourToMinute - case 12: self = .xdbcSubcodeHourToSecond - case 13: self = .xdbcSubcodeMinuteToSecond - case 101: self = .xdbcSubcodeIntervalYear - case 102: self = .xdbcSubcodeIntervalMonth - case 103: self = .xdbcSubcodeIntervalDay - case 104: self = .xdbcSubcodeIntervalHour - case 105: self = .xdbcSubcodeIntervalMinute - case 106: self = .xdbcSubcodeIntervalSecond - case 107: self = .xdbcSubcodeIntervalYearToMonth - case 108: self = .xdbcSubcodeIntervalDayToHour - case 109: self = .xdbcSubcodeIntervalDayToMinute - case 110: self = .xdbcSubcodeIntervalDayToSecond - case 111: self = .xdbcSubcodeIntervalHourToMinute - case 112: self = .xdbcSubcodeIntervalHourToSecond - case 113: self = .xdbcSubcodeIntervalMinuteToSecond - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .xdbcSubcodeUnknown: return 0 - case .xdbcSubcodeYear: return 1 - case .xdbcSubcodeTime: return 2 - case .xdbcSubcodeTimestamp: return 3 - case .xdbcSubcodeTimeWithTimezone: return 4 - case .xdbcSubcodeTimestampWithTimezone: return 5 - case .xdbcSubcodeSecond: return 6 - case .xdbcSubcodeYearToMonth: return 7 - case .xdbcSubcodeDayToHour: return 8 - case .xdbcSubcodeDayToMinute: return 9 - case .xdbcSubcodeDayToSecond: return 10 - case .xdbcSubcodeHourToMinute: return 11 - case .xdbcSubcodeHourToSecond: return 12 - case .xdbcSubcodeMinuteToSecond: return 13 - case .xdbcSubcodeIntervalYear: return 101 - case .xdbcSubcodeIntervalMonth: return 102 - case 
.xdbcSubcodeIntervalDay: return 103
-    case .xdbcSubcodeIntervalHour: return 104
-    case .xdbcSubcodeIntervalMinute: return 105
-    case .xdbcSubcodeIntervalSecond: return 106
-    case .xdbcSubcodeIntervalYearToMonth: return 107
-    case .xdbcSubcodeIntervalDayToHour: return 108
-    case .xdbcSubcodeIntervalDayToMinute: return 109
-    case .xdbcSubcodeIntervalDayToSecond: return 110
-    case .xdbcSubcodeIntervalHourToMinute: return 111
-    case .xdbcSubcodeIntervalHourToSecond: return 112
-    case .xdbcSubcodeIntervalMinuteToSecond: return 113
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_XdbcDatetimeSubcode: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_XdbcDatetimeSubcode] = [
-    .xdbcSubcodeUnknown,
-    .xdbcSubcodeYear,
-    .xdbcSubcodeTime,
-    .xdbcSubcodeTimestamp,
-    .xdbcSubcodeTimeWithTimezone,
-    .xdbcSubcodeTimestampWithTimezone,
-    .xdbcSubcodeSecond,
-    .xdbcSubcodeYearToMonth,
-    .xdbcSubcodeDayToHour,
-    .xdbcSubcodeDayToMinute,
-    .xdbcSubcodeDayToSecond,
-    .xdbcSubcodeHourToMinute,
-    .xdbcSubcodeHourToSecond,
-    .xdbcSubcodeMinuteToSecond,
-    .xdbcSubcodeIntervalYear,
-    .xdbcSubcodeIntervalMonth,
-    .xdbcSubcodeIntervalDay,
-    .xdbcSubcodeIntervalHour,
-    .xdbcSubcodeIntervalMinute,
-    .xdbcSubcodeIntervalSecond,
-    .xdbcSubcodeIntervalYearToMonth,
-    .xdbcSubcodeIntervalDayToHour,
-    .xdbcSubcodeIntervalDayToMinute,
-    .xdbcSubcodeIntervalDayToSecond,
-    .xdbcSubcodeIntervalHourToMinute,
-    .xdbcSubcodeIntervalHourToSecond,
-    .xdbcSubcodeIntervalMinuteToSecond,
-  ]
-}
-
-#endif // swift(>=4.2)
-
-enum Arrow_Flight_Protocol_Sql_Nullable: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-
-  ///*
-  /// Indicates that the field does not allow the use of null values.
-  case nullabilityNoNulls // = 0
-
-  ///*
-  /// Indicates that the field allows the use of null values.
-  case nullabilityNullable // = 1
-
-  ///*
-  /// Indicates that the nullability of the field cannot be determined.
-  case nullabilityUnknown // = 2
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .nullabilityNoNulls
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .nullabilityNoNulls
-    case 1: self = .nullabilityNullable
-    case 2: self = .nullabilityUnknown
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .nullabilityNoNulls: return 0
-    case .nullabilityNullable: return 1
-    case .nullabilityUnknown: return 2
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_Nullable: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_Nullable] = [
-    .nullabilityNoNulls,
-    .nullabilityNullable,
-    .nullabilityUnknown,
-  ]
-}
-
-#endif // swift(>=4.2)
-
-enum Arrow_Flight_Protocol_Sql_Searchable: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-
-  ///*
-  /// Indicates that the column cannot be used in a WHERE clause.
-  case none // = 0
-
-  ///*
-  /// Indicates that the column can be used in a WHERE clause if it is using a
-  /// LIKE operator.
-  case char // = 1
-
-  ///*
-  /// Indicates that the column can be used in a WHERE clause with any
-  /// operator other than LIKE.
-  ///
-  /// - Allowed operators: comparison, quantified comparison, BETWEEN,
-  /// DISTINCT, IN, MATCH, and UNIQUE.
-  case basic // = 2
-
-  ///*
-  /// Indicates that the column can be used in a WHERE clause using any operator.
-  case full // = 3
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .none
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .none
-    case 1: self = .char
-    case 2: self = .basic
-    case 3: self = .full
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .none: return 0
-    case .char: return 1
-    case .basic: return 2
-    case .full: return 3
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_Searchable: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_Searchable] = [
-    .none,
-    .char,
-    .basic,
-    .full,
-  ]
-}
-
-#endif // swift(>=4.2)
-
-enum Arrow_Flight_Protocol_Sql_UpdateDeleteRules: SwiftProtobuf.Enum {
-  typealias RawValue = Int
-  case cascade // = 0
-  case restrict // = 1
-  case setNull // = 2
-  case noAction // = 3
-  case setDefault // = 4
-  case UNRECOGNIZED(Int)
-
-  init() {
-    self = .cascade
-  }
-
-  init?(rawValue: Int) {
-    switch rawValue {
-    case 0: self = .cascade
-    case 1: self = .restrict
-    case 2: self = .setNull
-    case 3: self = .noAction
-    case 4: self = .setDefault
-    default: self = .UNRECOGNIZED(rawValue)
-    }
-  }
-
-  var rawValue: Int {
-    switch self {
-    case .cascade: return 0
-    case .restrict: return 1
-    case .setNull: return 2
-    case .noAction: return 3
-    case .setDefault: return 4
-    case .UNRECOGNIZED(let i): return i
-    }
-  }
-
-}
-
-#if swift(>=4.2)
-
-extension Arrow_Flight_Protocol_Sql_UpdateDeleteRules: CaseIterable {
-  // The compiler won't synthesize support with the UNRECOGNIZED case.
-  static var allCases: [Arrow_Flight_Protocol_Sql_UpdateDeleteRules] = [
-    .cascade,
-    .restrict,
-    .setNull,
-    .noAction,
-    .setDefault,
-  ]
-}
-
-#endif // swift(>=4.2)
-
-///
-/// Represents a metadata request. Used in the command member of FlightDescriptor
-/// for the following RPC calls:
-///  - GetSchema: return the Arrow schema of the query.
-///  - GetFlightInfo: execute the metadata request.
-///
-/// The returned Arrow schema will be:
-/// <
-///  info_name: uint32 not null,
-///  value: dense_union<
-///   string_value: utf8,
-///   bool_value: bool,
-///   bigint_value: int64,
-///   int32_bitmask: int32,
-///   string_list: list<string_data>
-///   int32_to_int32_list_map: map<int32, list<int32>>
-/// >
-/// where there is one row per requested piece of metadata information.
-struct Arrow_Flight_Protocol_Sql_CommandGetSqlInfo {
-  // SwiftProtobuf.Message conformance is added in an extension below. See the
-  // `Message` and `Message+*Additions` files in the SwiftProtobuf library for
-  // methods supported on all messages.
-
-  ///
-  /// Values are modelled after ODBC's SQLGetInfo() function. This information is
-  /// intended to provide Flight SQL clients with basic server information as well
-  /// as SQL-syntax- and SQL-function-related information.
-  /// More information types can be added in future releases.
-  /// E.g. more SQL syntax support types, scalar functions support, type conversion support etc.
-  ///
-  /// Note that the set of metadata may expand.
-  ///
-  /// Initially, Flight SQL will support the following information types:
-  /// - Server Information - Range [0-500)
-  /// - Syntax Information - Range [500-1000)
-  /// Range [0-10,000) is reserved for defaults (see SqlInfo enum for default options).
-  /// Custom options should start at 10,000.
-  ///
-  /// If omitted, then all metadata will be retrieved.
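// ---------------------------------------------------------------------------
// [Editor's aside - illustrative sketch, not part of this patch; the doc
// comment for the `info` field resumes below.] A minimal sketch of issuing
// the CommandGetSqlInfo message defined above: an empty `info` list asks for
// all metadata, and bitmask-style results (the int32_bitmask member of the
// dense union) are conventionally decoded by treating each enum raw value as
// a bit position. Assumes SwiftProtobuf and the generated module are
// available.

import Foundation
import SwiftProtobuf

func makeGetSqlInfoCommand() throws -> Data {
  var request = Arrow_Flight_Protocol_Sql_CommandGetSqlInfo()
  request.info = []  // empty => retrieve all metadata
  return try request.serializedData()  // bytes for the FlightDescriptor command
}

// Decoding a hypothetical bitmask value for result-set concurrency support:
let bitmask = 0b110
let supported = Arrow_Flight_Protocol_Sql_SqlSupportedResultSetConcurrency.allCases
  .filter { bitmask & (1 << $0.rawValue) != 0 }
// supported == [.sqlResultSetConcurrencyReadOnly, .sqlResultSetConcurrencyUpdatable]
// ---------------------------------------------------------------------------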
- /// Flight SQL Servers may choose to include additional metadata above and beyond the specified set, however they must - /// at least return the specified set. IDs ranging from 0 to 10,000 (exclusive) are reserved for future use. - /// If additional metadata is included, the metadata IDs should start from 10,000. - var info: [UInt32] = [] - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Represents a request to retrieve information about data type supported on a Flight SQL enabled backend. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned schema will be: -/// < -/// type_name: utf8 not null (The name of the data type, for example: VARCHAR, INTEGER, etc), -/// data_type: int32 not null (The SQL data type), -/// column_size: int32 (The maximum size supported by that column. -/// In case of exact numeric types, this represents the maximum precision. -/// In case of string types, this represents the character length. -/// In case of datetime data types, this represents the length in characters of the string representation. -/// NULL is returned for data types where column size is not applicable.), -/// literal_prefix: utf8 (Character or characters used to prefix a literal, NULL is returned for -/// data types where a literal prefix is not applicable.), -/// literal_suffix: utf8 (Character or characters used to terminate a literal, -/// NULL is returned for data types where a literal suffix is not applicable.), -/// create_params: list -/// (A list of keywords corresponding to which parameters can be used when creating -/// a column for that specific type. -/// NULL is returned if there are no parameters for the data type definition.), -/// nullable: int32 not null (Shows if the data type accepts a NULL value. The possible values can be seen in the -/// Nullable enum.), -/// case_sensitive: bool not null (Shows if a character data type is case-sensitive in collations and comparisons), -/// searchable: int32 not null (Shows how the data type is used in a WHERE clause. The possible values can be seen in the -/// Searchable enum.), -/// unsigned_attribute: bool (Shows if the data type is unsigned. NULL is returned if the attribute is -/// not applicable to the data type or the data type is not numeric.), -/// fixed_prec_scale: bool not null (Shows if the data type has predefined fixed precision and scale.), -/// auto_increment: bool (Shows if the data type is auto incremental. NULL is returned if the attribute -/// is not applicable to the data type or the data type is not numeric.), -/// local_type_name: utf8 (Localized version of the data source-dependent name of the data type. NULL -/// is returned if a localized name is not supported by the data source), -/// minimum_scale: int32 (The minimum scale of the data type on the data source. -/// If a data type has a fixed scale, the MINIMUM_SCALE and MAXIMUM_SCALE -/// columns both contain this value. NULL is returned if scale is not applicable.), -/// maximum_scale: int32 (The maximum scale of the data type on the data source. -/// NULL is returned if scale is not applicable.), -/// sql_data_type: int32 not null (The value of the SQL DATA TYPE which has the same values -/// as data_type value. Except for interval and datetime, which -/// uses generic values. More info about those types can be -/// obtained through datetime_subcode. 
The possible values can be seen -/// in the XdbcDataType enum.), -/// datetime_subcode: int32 (Only used when the SQL DATA TYPE is interval or datetime. It contains -/// its sub types. For type different from interval and datetime, this value -/// is NULL. The possible values can be seen in the XdbcDatetimeSubcode enum.), -/// num_prec_radix: int32 (If the data type is an approximate numeric type, this column contains -/// the value 2 to indicate that COLUMN_SIZE specifies a number of bits. For -/// exact numeric types, this column contains the value 10 to indicate that -/// column size specifies a number of decimal digits. Otherwise, this column is NULL.), -/// interval_precision: int32 (If the data type is an interval data type, then this column contains the value -/// of the interval leading precision. Otherwise, this column is NULL. This fields -/// is only relevant to be used by ODBC). -/// > -/// The returned data should be ordered by data_type and then by type_name. -struct Arrow_Flight_Protocol_Sql_CommandGetXdbcTypeInfo { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// Specifies the data type to search for the info. - var dataType: Int32 { - get {return _dataType ?? 0} - set {_dataType = newValue} - } - /// Returns true if `dataType` has been explicitly set. - var hasDataType: Bool {return self._dataType != nil} - /// Clears the value of `dataType`. Subsequent reads from it will return its default value. - mutating func clearDataType() {self._dataType = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _dataType: Int32? = nil -} - -/// -/// Represents a request to retrieve the list of catalogs on a Flight SQL enabled backend. -/// The definition of a catalog depends on vendor/implementation. It is usually the database itself -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// catalog_name: utf8 not null -/// > -/// The returned data should be ordered by catalog_name. -struct Arrow_Flight_Protocol_Sql_CommandGetCatalogs { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Represents a request to retrieve the list of database schemas on a Flight SQL enabled backend. -/// The definition of a database schema depends on vendor/implementation. It is usually a collection of tables. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// catalog_name: utf8, -/// db_schema_name: utf8 not null -/// > -/// The returned data should be ordered by catalog_name, then db_schema_name. -struct Arrow_Flight_Protocol_Sql_CommandGetDbSchemas { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. 
- - /// - /// Specifies the Catalog to search for the tables. - /// An empty string retrieves those without a catalog. - /// If omitted the catalog name should not be used to narrow the search. - var catalog: String { - get {return _catalog ?? String()} - set {_catalog = newValue} - } - /// Returns true if `catalog` has been explicitly set. - var hasCatalog: Bool {return self._catalog != nil} - /// Clears the value of `catalog`. Subsequent reads from it will return its default value. - mutating func clearCatalog() {self._catalog = nil} - - /// - /// Specifies a filter pattern for schemas to search for. - /// When no db_schema_filter_pattern is provided, the pattern will not be used to narrow the search. - /// In the pattern string, two special characters can be used to denote matching rules: - /// - "%" means to match any substring with 0 or more characters. - /// - "_" means to match any one character. - var dbSchemaFilterPattern: String { - get {return _dbSchemaFilterPattern ?? String()} - set {_dbSchemaFilterPattern = newValue} - } - /// Returns true if `dbSchemaFilterPattern` has been explicitly set. - var hasDbSchemaFilterPattern: Bool {return self._dbSchemaFilterPattern != nil} - /// Clears the value of `dbSchemaFilterPattern`. Subsequent reads from it will return its default value. - mutating func clearDbSchemaFilterPattern() {self._dbSchemaFilterPattern = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _catalog: String? = nil - fileprivate var _dbSchemaFilterPattern: String? = nil -} - -/// -/// Represents a request to retrieve the list of tables, and optionally their schemas, on a Flight SQL enabled backend. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// catalog_name: utf8, -/// db_schema_name: utf8, -/// table_name: utf8 not null, -/// table_type: utf8 not null, -/// [optional] table_schema: bytes not null (schema of the table as described in Schema.fbs::Schema, -/// it is serialized as an IPC message.) -/// > -/// Fields on table_schema may contain the following metadata: -/// - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name -/// - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name -/// - ARROW:FLIGHT:SQL:TABLE_NAME - Table name -/// - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. -/// - ARROW:FLIGHT:SQL:PRECISION - Column precision/size -/// - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable -/// - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. -/// The returned data should be ordered by catalog_name, db_schema_name, table_name, then table_type, followed by table_schema if requested. -struct Arrow_Flight_Protocol_Sql_CommandGetTables { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// Specifies the Catalog to search for the tables. 
- /// An empty string retrieves those without a catalog. - /// If omitted the catalog name should not be used to narrow the search. - var catalog: String { - get {return _catalog ?? String()} - set {_catalog = newValue} - } - /// Returns true if `catalog` has been explicitly set. - var hasCatalog: Bool {return self._catalog != nil} - /// Clears the value of `catalog`. Subsequent reads from it will return its default value. - mutating func clearCatalog() {self._catalog = nil} - - /// - /// Specifies a filter pattern for schemas to search for. - /// When no db_schema_filter_pattern is provided, all schemas matching other filters are searched. - /// In the pattern string, two special characters can be used to denote matching rules: - /// - "%" means to match any substring with 0 or more characters. - /// - "_" means to match any one character. - var dbSchemaFilterPattern: String { - get {return _dbSchemaFilterPattern ?? String()} - set {_dbSchemaFilterPattern = newValue} - } - /// Returns true if `dbSchemaFilterPattern` has been explicitly set. - var hasDbSchemaFilterPattern: Bool {return self._dbSchemaFilterPattern != nil} - /// Clears the value of `dbSchemaFilterPattern`. Subsequent reads from it will return its default value. - mutating func clearDbSchemaFilterPattern() {self._dbSchemaFilterPattern = nil} - - /// - /// Specifies a filter pattern for tables to search for. - /// When no table_name_filter_pattern is provided, all tables matching other filters are searched. - /// In the pattern string, two special characters can be used to denote matching rules: - /// - "%" means to match any substring with 0 or more characters. - /// - "_" means to match any one character. - var tableNameFilterPattern: String { - get {return _tableNameFilterPattern ?? String()} - set {_tableNameFilterPattern = newValue} - } - /// Returns true if `tableNameFilterPattern` has been explicitly set. - var hasTableNameFilterPattern: Bool {return self._tableNameFilterPattern != nil} - /// Clears the value of `tableNameFilterPattern`. Subsequent reads from it will return its default value. - mutating func clearTableNameFilterPattern() {self._tableNameFilterPattern = nil} - - /// - /// Specifies a filter of table types which must match. - /// The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables. - /// TABLE, VIEW, and SYSTEM TABLE are commonly supported. - var tableTypes: [String] = [] - - /// Specifies if the Arrow schema should be returned for found tables. - var includeSchema: Bool = false - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _catalog: String? = nil - fileprivate var _dbSchemaFilterPattern: String? = nil - fileprivate var _tableNameFilterPattern: String? = nil -} - -/// -/// Represents a request to retrieve the list of table types on a Flight SQL enabled backend. -/// The table types depend on vendor/implementation. It is usually used to separate tables from views or system tables. -/// TABLE, VIEW, and SYSTEM TABLE are commonly supported. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// table_type: utf8 not null -/// > -/// The returned data should be ordered by table_type. -struct Arrow_Flight_Protocol_Sql_CommandGetTableTypes { - // SwiftProtobuf.Message conformance is added in an extension below. 
See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Represents a request to retrieve the primary keys of a table on a Flight SQL enabled backend. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// catalog_name: utf8, -/// db_schema_name: utf8, -/// table_name: utf8 not null, -/// column_name: utf8 not null, -/// key_name: utf8, -/// key_sequence: int32 not null -/// > -/// The returned data should be ordered by catalog_name, db_schema_name, table_name, key_name, then key_sequence. -struct Arrow_Flight_Protocol_Sql_CommandGetPrimaryKeys { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// Specifies the catalog to search for the table. - /// An empty string retrieves those without a catalog. - /// If omitted the catalog name should not be used to narrow the search. - var catalog: String { - get {return _catalog ?? String()} - set {_catalog = newValue} - } - /// Returns true if `catalog` has been explicitly set. - var hasCatalog: Bool {return self._catalog != nil} - /// Clears the value of `catalog`. Subsequent reads from it will return its default value. - mutating func clearCatalog() {self._catalog = nil} - - /// - /// Specifies the schema to search for the table. - /// An empty string retrieves those without a schema. - /// If omitted the schema name should not be used to narrow the search. - var dbSchema: String { - get {return _dbSchema ?? String()} - set {_dbSchema = newValue} - } - /// Returns true if `dbSchema` has been explicitly set. - var hasDbSchema: Bool {return self._dbSchema != nil} - /// Clears the value of `dbSchema`. Subsequent reads from it will return its default value. - mutating func clearDbSchema() {self._dbSchema = nil} - - /// Specifies the table to get the primary keys for. - var table: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _catalog: String? = nil - fileprivate var _dbSchema: String? = nil -} - -/// -/// Represents a request to retrieve a description of the foreign key columns that reference the given table's -/// primary key columns (the foreign keys exported by a table) of a table on a Flight SQL enabled backend. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// pk_catalog_name: utf8, -/// pk_db_schema_name: utf8, -/// pk_table_name: utf8 not null, -/// pk_column_name: utf8 not null, -/// fk_catalog_name: utf8, -/// fk_db_schema_name: utf8, -/// fk_table_name: utf8 not null, -/// fk_column_name: utf8 not null, -/// key_sequence: int32 not null, -/// fk_key_name: utf8, -/// pk_key_name: utf8, -/// update_rule: uint8 not null, -/// delete_rule: uint8 not null -/// > -/// The returned data should be ordered by fk_catalog_name, fk_db_schema_name, fk_table_name, fk_key_name, then key_sequence. 
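// ---------------------------------------------------------------------------
// [Editor's aside - illustrative sketch, not part of this patch.] The
// catalog/db_schema fields of these metadata commands distinguish "unset"
// from "empty string", and the generated hasX/clearX accessors expose that
// three-way state. Shown with CommandGetPrimaryKeys from above; the table
// name is hypothetical.

var keys = Arrow_Flight_Protocol_Sql_CommandGetPrimaryKeys()
keys.table = "orders"     // required: the table whose primary keys we want
assert(!keys.hasCatalog)  // unset: the catalog does not narrow the search
keys.catalog = ""         // set to "": match only tables without a catalog
assert(keys.hasCatalog)
keys.clearCatalog()       // back to "not narrowed by catalog"
assert(!keys.hasCatalog && keys.catalog.isEmpty)  // reads fall back to ""
// ---------------------------------------------------------------------------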
-/// update_rule and delete_rule returns a byte that is equivalent to actions declared on UpdateDeleteRules enum. -struct Arrow_Flight_Protocol_Sql_CommandGetExportedKeys { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// Specifies the catalog to search for the foreign key table. - /// An empty string retrieves those without a catalog. - /// If omitted the catalog name should not be used to narrow the search. - var catalog: String { - get {return _catalog ?? String()} - set {_catalog = newValue} - } - /// Returns true if `catalog` has been explicitly set. - var hasCatalog: Bool {return self._catalog != nil} - /// Clears the value of `catalog`. Subsequent reads from it will return its default value. - mutating func clearCatalog() {self._catalog = nil} - - /// - /// Specifies the schema to search for the foreign key table. - /// An empty string retrieves those without a schema. - /// If omitted the schema name should not be used to narrow the search. - var dbSchema: String { - get {return _dbSchema ?? String()} - set {_dbSchema = newValue} - } - /// Returns true if `dbSchema` has been explicitly set. - var hasDbSchema: Bool {return self._dbSchema != nil} - /// Clears the value of `dbSchema`. Subsequent reads from it will return its default value. - mutating func clearDbSchema() {self._dbSchema = nil} - - /// Specifies the foreign key table to get the foreign keys for. - var table: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _catalog: String? = nil - fileprivate var _dbSchema: String? = nil -} - -/// -/// Represents a request to retrieve the foreign keys of a table on a Flight SQL enabled backend. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// pk_catalog_name: utf8, -/// pk_db_schema_name: utf8, -/// pk_table_name: utf8 not null, -/// pk_column_name: utf8 not null, -/// fk_catalog_name: utf8, -/// fk_db_schema_name: utf8, -/// fk_table_name: utf8 not null, -/// fk_column_name: utf8 not null, -/// key_sequence: int32 not null, -/// fk_key_name: utf8, -/// pk_key_name: utf8, -/// update_rule: uint8 not null, -/// delete_rule: uint8 not null -/// > -/// The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence. -/// update_rule and delete_rule returns a byte that is equivalent to actions: -/// - 0 = CASCADE -/// - 1 = RESTRICT -/// - 2 = SET NULL -/// - 3 = NO ACTION -/// - 4 = SET DEFAULT -struct Arrow_Flight_Protocol_Sql_CommandGetImportedKeys { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// - /// Specifies the catalog to search for the primary key table. - /// An empty string retrieves those without a catalog. - /// If omitted the catalog name should not be used to narrow the search. - var catalog: String { - get {return _catalog ?? String()} - set {_catalog = newValue} - } - /// Returns true if `catalog` has been explicitly set. - var hasCatalog: Bool {return self._catalog != nil} - /// Clears the value of `catalog`. 
Subsequent reads from it will return its default value. - mutating func clearCatalog() {self._catalog = nil} - - /// - /// Specifies the schema to search for the primary key table. - /// An empty string retrieves those without a schema. - /// If omitted the schema name should not be used to narrow the search. - var dbSchema: String { - get {return _dbSchema ?? String()} - set {_dbSchema = newValue} - } - /// Returns true if `dbSchema` has been explicitly set. - var hasDbSchema: Bool {return self._dbSchema != nil} - /// Clears the value of `dbSchema`. Subsequent reads from it will return its default value. - mutating func clearDbSchema() {self._dbSchema = nil} - - /// Specifies the primary key table to get the foreign keys for. - var table: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _catalog: String? = nil - fileprivate var _dbSchema: String? = nil -} - -/// -/// Represents a request to retrieve a description of the foreign key columns in the given foreign key table that -/// reference the primary key or the columns representing a unique constraint of the parent table (could be the same -/// or a different table) on a Flight SQL enabled backend. -/// Used in the command member of FlightDescriptor for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// - GetFlightInfo: execute the catalog metadata request. -/// -/// The returned Arrow schema will be: -/// < -/// pk_catalog_name: utf8, -/// pk_db_schema_name: utf8, -/// pk_table_name: utf8 not null, -/// pk_column_name: utf8 not null, -/// fk_catalog_name: utf8, -/// fk_db_schema_name: utf8, -/// fk_table_name: utf8 not null, -/// fk_column_name: utf8 not null, -/// key_sequence: int32 not null, -/// fk_key_name: utf8, -/// pk_key_name: utf8, -/// update_rule: uint8 not null, -/// delete_rule: uint8 not null -/// > -/// The returned data should be ordered by pk_catalog_name, pk_db_schema_name, pk_table_name, pk_key_name, then key_sequence. -/// update_rule and delete_rule returns a byte that is equivalent to actions: -/// - 0 = CASCADE -/// - 1 = RESTRICT -/// - 2 = SET NULL -/// - 3 = NO ACTION -/// - 4 = SET DEFAULT -struct Arrow_Flight_Protocol_Sql_CommandGetCrossReference { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - ///* - /// The catalog name where the parent table is. - /// An empty string retrieves those without a catalog. - /// If omitted the catalog name should not be used to narrow the search. - var pkCatalog: String { - get {return _pkCatalog ?? String()} - set {_pkCatalog = newValue} - } - /// Returns true if `pkCatalog` has been explicitly set. - var hasPkCatalog: Bool {return self._pkCatalog != nil} - /// Clears the value of `pkCatalog`. Subsequent reads from it will return its default value. - mutating func clearPkCatalog() {self._pkCatalog = nil} - - ///* - /// The Schema name where the parent table is. - /// An empty string retrieves those without a schema. - /// If omitted the schema name should not be used to narrow the search. - var pkDbSchema: String { - get {return _pkDbSchema ?? String()} - set {_pkDbSchema = newValue} - } - /// Returns true if `pkDbSchema` has been explicitly set. - var hasPkDbSchema: Bool {return self._pkDbSchema != nil} - /// Clears the value of `pkDbSchema`. Subsequent reads from it will return its default value. 
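// ---------------------------------------------------------------------------
// [Editor's aside - illustrative sketch, not part of this patch.] The two
// key-lookup commands above are mirror images: GetExportedKeys asks which
// foreign keys in other tables reference the given table's primary key,
// while GetImportedKeys asks which foreign keys the given table itself
// holds. Table names are hypothetical.

var exported = Arrow_Flight_Protocol_Sql_CommandGetExportedKeys()
exported.table = "customers"  // who references customers' primary key?

var imported = Arrow_Flight_Protocol_Sql_CommandGetImportedKeys()
imported.table = "orders"     // which parent tables does orders reference?
// ---------------------------------------------------------------------------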
- mutating func clearPkDbSchema() {self._pkDbSchema = nil} - - ///* - /// The parent table name. It cannot be null. - var pkTable: String = String() - - ///* - /// The catalog name where the foreign table is. - /// An empty string retrieves those without a catalog. - /// If omitted the catalog name should not be used to narrow the search. - var fkCatalog: String { - get {return _fkCatalog ?? String()} - set {_fkCatalog = newValue} - } - /// Returns true if `fkCatalog` has been explicitly set. - var hasFkCatalog: Bool {return self._fkCatalog != nil} - /// Clears the value of `fkCatalog`. Subsequent reads from it will return its default value. - mutating func clearFkCatalog() {self._fkCatalog = nil} - - ///* - /// The schema name where the foreign table is. - /// An empty string retrieves those without a schema. - /// If omitted the schema name should not be used to narrow the search. - var fkDbSchema: String { - get {return _fkDbSchema ?? String()} - set {_fkDbSchema = newValue} - } - /// Returns true if `fkDbSchema` has been explicitly set. - var hasFkDbSchema: Bool {return self._fkDbSchema != nil} - /// Clears the value of `fkDbSchema`. Subsequent reads from it will return its default value. - mutating func clearFkDbSchema() {self._fkDbSchema = nil} - - ///* - /// The foreign table name. It cannot be null. - var fkTable: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _pkCatalog: String? = nil - fileprivate var _pkDbSchema: String? = nil - fileprivate var _fkCatalog: String? = nil - fileprivate var _fkDbSchema: String? = nil -} - -/// -/// Request message for the "CreatePreparedStatement" action on a Flight SQL enabled backend. -struct Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The valid SQL string to create a prepared statement for. - var query: String = String() - - /// Create/execute the prepared statement as part of this transaction (if - /// unset, executions of the prepared statement will be auto-committed). - var transactionID: Data { - get {return _transactionID ?? Data()} - set {_transactionID = newValue} - } - /// Returns true if `transactionID` has been explicitly set. - var hasTransactionID: Bool {return self._transactionID != nil} - /// Clears the value of `transactionID`. Subsequent reads from it will return its default value. - mutating func clearTransactionID() {self._transactionID = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _transactionID: Data? = nil -} - -/// -/// An embedded message describing a Substrait plan to execute. -struct Arrow_Flight_Protocol_Sql_SubstraitPlan { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The serialized substrait.Plan to create a prepared statement for. - /// XXX(ARROW-16902): this is bytes instead of an embedded message - /// because Protobuf does not really support one DLL using Protobuf - /// definitions from another DLL. - var plan: Data = Data() - - /// The Substrait release, e.g. "0.12.0". This information is not - /// tracked in the plan itself, so this is the only way for consumers - /// to potentially know if they can handle the plan. 
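// ---------------------------------------------------------------------------
// [Editor's aside - illustrative sketch, not part of this patch.] Creating a
// prepared statement with the request message above: leaving transaction_id
// unset means every execution of the statement is auto-committed. The query
// text and handle bytes are hypothetical.

import Foundation

var prepare = Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementRequest()
prepare.query = "SELECT * FROM orders WHERE id = ?"
assert(!prepare.hasTransactionID)  // unset => executions auto-commit
prepare.transactionID = Data([0x01, 0x02])  // placeholder BeginTransaction handle
assert(prepare.hasTransactionID)   // now tied to that transaction
// ---------------------------------------------------------------------------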
- var version: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Request message for the "CreatePreparedSubstraitPlan" action on a Flight SQL enabled backend. -struct Arrow_Flight_Protocol_Sql_ActionCreatePreparedSubstraitPlanRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The serialized substrait.Plan to create a prepared statement for. - var plan: Arrow_Flight_Protocol_Sql_SubstraitPlan { - get {return _plan ?? Arrow_Flight_Protocol_Sql_SubstraitPlan()} - set {_plan = newValue} - } - /// Returns true if `plan` has been explicitly set. - var hasPlan: Bool {return self._plan != nil} - /// Clears the value of `plan`. Subsequent reads from it will return its default value. - mutating func clearPlan() {self._plan = nil} - - /// Create/execute the prepared statement as part of this transaction (if - /// unset, executions of the prepared statement will be auto-committed). - var transactionID: Data { - get {return _transactionID ?? Data()} - set {_transactionID = newValue} - } - /// Returns true if `transactionID` has been explicitly set. - var hasTransactionID: Bool {return self._transactionID != nil} - /// Clears the value of `transactionID`. Subsequent reads from it will return its default value. - mutating func clearTransactionID() {self._transactionID = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _plan: Arrow_Flight_Protocol_Sql_SubstraitPlan? = nil - fileprivate var _transactionID: Data? = nil -} - -/// -/// Wrap the result of a "CreatePreparedStatement" or "CreatePreparedSubstraitPlan" action. -/// -/// The resultant PreparedStatement can be closed either: -/// - Manually, through the "ClosePreparedStatement" action; -/// - Automatically, by a server timeout. -/// -/// The result should be wrapped in a google.protobuf.Any message. -struct Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the prepared statement on the server. - var preparedStatementHandle: Data = Data() - - /// If a result set generating query was provided, dataset_schema contains the - /// schema of the dataset as described in Schema.fbs::Schema, it is serialized as an IPC message. - var datasetSchema: Data = Data() - - /// If the query provided contained parameters, parameter_schema contains the - /// schema of the expected parameters as described in Schema.fbs::Schema, it is serialized as an IPC message. - var parameterSchema: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Request message for the "ClosePreparedStatement" action on a Flight SQL enabled backend. -/// Closes server resources associated with the prepared statement handle. -struct Arrow_Flight_Protocol_Sql_ActionClosePreparedStatementRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the prepared statement on the server. 
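// ---------------------------------------------------------------------------
// [Editor's aside - illustrative sketch, not part of this patch.] The
// prepared-statement handle returned by the create action is opaque; the
// client only echoes it back, e.g. when releasing server resources via
// ClosePreparedStatement. In the real protocol the action result arrives
// wrapped in a google.protobuf.Any; this sketch assumes it has already been
// unwrapped into raw message bytes.

import Foundation
import SwiftProtobuf

func closeRequest(forCreateResultBytes bytes: Data) throws -> Data {
  let result = try Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementResult(
    serializedData: bytes)
  var close = Arrow_Flight_Protocol_Sql_ActionClosePreparedStatementRequest()
  close.preparedStatementHandle = result.preparedStatementHandle
  return try close.serializedData()
}
// ---------------------------------------------------------------------------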
- var preparedStatementHandle: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Request message for the "BeginTransaction" action. -/// Begins a transaction. -struct Arrow_Flight_Protocol_Sql_ActionBeginTransactionRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Request message for the "BeginSavepoint" action. -/// Creates a savepoint within a transaction. -/// -/// Only supported if FLIGHT_SQL_TRANSACTION is -/// FLIGHT_SQL_TRANSACTION_SUPPORT_SAVEPOINT. -struct Arrow_Flight_Protocol_Sql_ActionBeginSavepointRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The transaction to which a savepoint belongs. - var transactionID: Data = Data() - - /// Name for the savepoint. - var name: String = String() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// The result of a "BeginTransaction" action. -/// -/// The transaction can be manipulated with the "EndTransaction" action, or -/// automatically via server timeout. If the transaction times out, then it is -/// automatically rolled back. -/// -/// The result should be wrapped in a google.protobuf.Any message. -struct Arrow_Flight_Protocol_Sql_ActionBeginTransactionResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the transaction on the server. - var transactionID: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// The result of a "BeginSavepoint" action. -/// -/// The transaction can be manipulated with the "EndSavepoint" action. -/// If the associated transaction is committed, rolled back, or times -/// out, then the savepoint is also invalidated. -/// -/// The result should be wrapped in a google.protobuf.Any message. -struct Arrow_Flight_Protocol_Sql_ActionBeginSavepointResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the savepoint on the server. - var savepointID: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Request message for the "EndTransaction" action. -/// -/// Commit (COMMIT) or rollback (ROLLBACK) the transaction. -/// -/// If the action completes successfully, the transaction handle is -/// invalidated, as are all associated savepoints. -struct Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the transaction on the server. - var transactionID: Data = Data() - - /// Whether to commit/rollback the given transaction. 
- var action: Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest.EndTransaction = .unspecified - - var unknownFields = SwiftProtobuf.UnknownStorage() - - enum EndTransaction: SwiftProtobuf.Enum { - typealias RawValue = Int - case unspecified // = 0 - - /// Commit the transaction. - case commit // = 1 - - /// Roll back the transaction. - case rollback // = 2 - case UNRECOGNIZED(Int) - - init() { - self = .unspecified - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .unspecified - case 1: self = .commit - case 2: self = .rollback - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .unspecified: return 0 - case .commit: return 1 - case .rollback: return 2 - case .UNRECOGNIZED(let i): return i - } - } - - } - - init() {} -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest.EndTransaction: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest.EndTransaction] = [ - .unspecified, - .commit, - .rollback, - ] -} - -#endif // swift(>=4.2) - -/// -/// Request message for the "EndSavepoint" action. -/// -/// Release (RELEASE) the savepoint or rollback (ROLLBACK) to the -/// savepoint. -/// -/// Releasing a savepoint invalidates that savepoint. Rolling back to -/// a savepoint does not invalidate the savepoint, but invalidates all -/// savepoints created after the current savepoint. -struct Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the savepoint on the server. - var savepointID: Data = Data() - - /// Whether to rollback/release the given savepoint. - var action: Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest.EndSavepoint = .unspecified - - var unknownFields = SwiftProtobuf.UnknownStorage() - - enum EndSavepoint: SwiftProtobuf.Enum { - typealias RawValue = Int - case unspecified // = 0 - - /// Release the savepoint. - case release // = 1 - - /// Roll back to a savepoint. - case rollback // = 2 - case UNRECOGNIZED(Int) - - init() { - self = .unspecified - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .unspecified - case 1: self = .release - case 2: self = .rollback - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .unspecified: return 0 - case .release: return 1 - case .rollback: return 2 - case .UNRECOGNIZED(let i): return i - } - } - - } - - init() {} -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest.EndSavepoint: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. - static var allCases: [Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest.EndSavepoint] = [ - .unspecified, - .release, - .rollback, - ] -} - -#endif // swift(>=4.2) - -/// -/// Represents a SQL query. Used in the command member of FlightDescriptor -/// for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. 
-/// Fields on this schema may contain the following metadata: -/// - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name -/// - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name -/// - ARROW:FLIGHT:SQL:TABLE_NAME - Table name -/// - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. -/// - ARROW:FLIGHT:SQL:PRECISION - Column precision/size -/// - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable -/// - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. -/// - GetFlightInfo: execute the query. -struct Arrow_Flight_Protocol_Sql_CommandStatementQuery { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The SQL syntax. - var query: String = String() - - /// Include the query as part of this transaction (if unset, the query is auto-committed). - var transactionID: Data { - get {return _transactionID ?? Data()} - set {_transactionID = newValue} - } - /// Returns true if `transactionID` has been explicitly set. - var hasTransactionID: Bool {return self._transactionID != nil} - /// Clears the value of `transactionID`. Subsequent reads from it will return its default value. - mutating func clearTransactionID() {self._transactionID = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _transactionID: Data? = nil -} - -/// -/// Represents a Substrait plan. Used in the command member of FlightDescriptor -/// for the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// Fields on this schema may contain the following metadata: -/// - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name -/// - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name -/// - ARROW:FLIGHT:SQL:TABLE_NAME - Table name -/// - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. -/// - ARROW:FLIGHT:SQL:PRECISION - Column precision/size -/// - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable -/// - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. -/// - GetFlightInfo: execute the query. -/// - DoPut: execute the query. -struct Arrow_Flight_Protocol_Sql_CommandStatementSubstraitPlan { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// A serialized substrait.Plan - var plan: Arrow_Flight_Protocol_Sql_SubstraitPlan { - get {return _plan ?? Arrow_Flight_Protocol_Sql_SubstraitPlan()} - set {_plan = newValue} - } - /// Returns true if `plan` has been explicitly set. 
- var hasPlan: Bool {return self._plan != nil} - /// Clears the value of `plan`. Subsequent reads from it will return its default value. - mutating func clearPlan() {self._plan = nil} - - /// Include the query as part of this transaction (if unset, the query is auto-committed). - var transactionID: Data { - get {return _transactionID ?? Data()} - set {_transactionID = newValue} - } - /// Returns true if `transactionID` has been explicitly set. - var hasTransactionID: Bool {return self._transactionID != nil} - /// Clears the value of `transactionID`. Subsequent reads from it will return its default value. - mutating func clearTransactionID() {self._transactionID = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _plan: Arrow_Flight_Protocol_Sql_SubstraitPlan? = nil - fileprivate var _transactionID: Data? = nil -} - -///* -/// Represents a ticket resulting from GetFlightInfo with a CommandStatementQuery. -/// This should be used only once and treated as an opaque value, that is, clients should not attempt to parse this. -struct Arrow_Flight_Protocol_Sql_TicketStatementQuery { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Unique identifier for the instance of the statement to execute. - var statementHandle: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Represents an instance of executing a prepared statement. Used in the command member of FlightDescriptor for -/// the following RPC calls: -/// - GetSchema: return the Arrow schema of the query. -/// Fields on this schema may contain the following metadata: -/// - ARROW:FLIGHT:SQL:CATALOG_NAME - Table's catalog name -/// - ARROW:FLIGHT:SQL:DB_SCHEMA_NAME - Database schema name -/// - ARROW:FLIGHT:SQL:TABLE_NAME - Table name -/// - ARROW:FLIGHT:SQL:TYPE_NAME - The data source-specific name for the data type of the column. -/// - ARROW:FLIGHT:SQL:PRECISION - Column precision/size -/// - ARROW:FLIGHT:SQL:SCALE - Column scale/decimal digits if applicable -/// - ARROW:FLIGHT:SQL:IS_AUTO_INCREMENT - "1" indicates if the column is auto incremented, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_CASE_SENSITIVE - "1" indicates if the column is case-sensitive, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_READ_ONLY - "1" indicates if the column is read only, "0" otherwise. -/// - ARROW:FLIGHT:SQL:IS_SEARCHABLE - "1" indicates if the column is searchable via WHERE clause, "0" otherwise. -/// - DoPut: bind parameter values. All of the bound parameter sets will be executed as a single atomic execution. -/// - GetFlightInfo: execute the prepared statement instance. -struct Arrow_Flight_Protocol_Sql_CommandPreparedStatementQuery { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the prepared statement on the server. - var preparedStatementHandle: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Represents a SQL update query. Used in the command member of FlightDescriptor -/// for the RPC call DoPut to cause the server to execute the included SQL update. -struct Arrow_Flight_Protocol_Sql_CommandStatementUpdate { - // SwiftProtobuf.Message conformance is added in an extension below. 
See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The SQL syntax. - var query: String = String() - - /// Include the query as part of this transaction (if unset, the query is auto-committed). - var transactionID: Data { - get {return _transactionID ?? Data()} - set {_transactionID = newValue} - } - /// Returns true if `transactionID` has been explicitly set. - var hasTransactionID: Bool {return self._transactionID != nil} - /// Clears the value of `transactionID`. Subsequent reads from it will return its default value. - mutating func clearTransactionID() {self._transactionID = nil} - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} - - fileprivate var _transactionID: Data? = nil -} - -/// -/// Represents a SQL update query. Used in the command member of FlightDescriptor -/// for the RPC call DoPut to cause the server to execute the included -/// prepared statement handle as an update. -struct Arrow_Flight_Protocol_Sql_CommandPreparedStatementUpdate { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// Opaque handle for the prepared statement on the server. - var preparedStatementHandle: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Returned from the RPC call DoPut when a CommandStatementUpdate -/// CommandPreparedStatementUpdate was in the request, containing -/// results from the update. -struct Arrow_Flight_Protocol_Sql_DoPutUpdateResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The number of records updated. A return value of -1 represents - /// an unknown updated record count. - var recordCount: Int64 = 0 - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// Request message for the "CancelQuery" action. -/// -/// Explicitly cancel a running query. -/// -/// This lets a single client explicitly cancel work, no matter how many clients -/// are involved/whether the query is distributed or not, given server support. -/// The transaction/statement is not rolled back; it is the application's job to -/// commit or rollback as appropriate. This only indicates the client no longer -/// wishes to read the remainder of the query results or continue submitting -/// data. -/// -/// This command is idempotent. -/// -/// This command is deprecated since 13.0.0. Use the "CancelFlightInfo" -/// action with DoAction instead. -struct Arrow_Flight_Protocol_Sql_ActionCancelQueryRequest { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - /// The result of the GetFlightInfo RPC that initiated the query. - /// XXX(ARROW-16902): this must be a serialized FlightInfo, but is - /// rendered as bytes because Protobuf does not really support one - /// DLL using Protobuf definitions from another DLL. - var info: Data = Data() - - var unknownFields = SwiftProtobuf.UnknownStorage() - - init() {} -} - -/// -/// The result of cancelling a query. -/// -/// The result should be wrapped in a google.protobuf.Any message. -/// -/// This command is deprecated since 13.0.0. 
Use the "CancelFlightInfo" -/// action with DoAction instead. -struct Arrow_Flight_Protocol_Sql_ActionCancelQueryResult { - // SwiftProtobuf.Message conformance is added in an extension below. See the - // `Message` and `Message+*Additions` files in the SwiftProtobuf library for - // methods supported on all messages. - - var result: Arrow_Flight_Protocol_Sql_ActionCancelQueryResult.CancelResult = .unspecified - - var unknownFields = SwiftProtobuf.UnknownStorage() - - enum CancelResult: SwiftProtobuf.Enum { - typealias RawValue = Int - - /// The cancellation status is unknown. Servers should avoid using - /// this value (send a NOT_FOUND error if the requested query is - /// not known). Clients can retry the request. - case unspecified // = 0 - - /// The cancellation request is complete. Subsequent requests with - /// the same payload may return CANCELLED or a NOT_FOUND error. - case cancelled // = 1 - - /// The cancellation request is in progress. The client may retry - /// the cancellation request. - case cancelling // = 2 - - /// The query is not cancellable. The client should not retry the - /// cancellation request. - case notCancellable // = 3 - case UNRECOGNIZED(Int) - - init() { - self = .unspecified - } - - init?(rawValue: Int) { - switch rawValue { - case 0: self = .unspecified - case 1: self = .cancelled - case 2: self = .cancelling - case 3: self = .notCancellable - default: self = .UNRECOGNIZED(rawValue) - } - } - - var rawValue: Int { - switch self { - case .unspecified: return 0 - case .cancelled: return 1 - case .cancelling: return 2 - case .notCancellable: return 3 - case .UNRECOGNIZED(let i): return i - } - } - - } - - init() {} -} - -#if swift(>=4.2) - -extension Arrow_Flight_Protocol_Sql_ActionCancelQueryResult.CancelResult: CaseIterable { - // The compiler won't synthesize support with the UNRECOGNIZED case. 
- static var allCases: [Arrow_Flight_Protocol_Sql_ActionCancelQueryResult.CancelResult] = [ - .unspecified, - .cancelled, - .cancelling, - .notCancellable, - ] -} - -#endif // swift(>=4.2) - -#if swift(>=5.5) && canImport(_Concurrency) -extension Arrow_Flight_Protocol_Sql_SqlInfo: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedTransaction: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedCaseSensitivity: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlNullOrdering: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SupportedSqlGrammar: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SupportedAnsi92SqlGrammarLevel: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlOuterJoinsSupportLevel: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedGroupBy: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedElementActions: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedPositionedCommands: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedSubqueries: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedUnions: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlTransactionIsolationLevel: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedTransactions: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedResultSetType: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportedResultSetConcurrency: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SqlSupportsConvert: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_XdbcDataType: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_XdbcDatetimeSubcode: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_Nullable: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_Searchable: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_UpdateDeleteRules: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetSqlInfo: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetXdbcTypeInfo: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetCatalogs: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetDbSchemas: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetTables: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetTableTypes: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetPrimaryKeys: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetExportedKeys: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetImportedKeys: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandGetCrossReference: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_SubstraitPlan: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionCreatePreparedSubstraitPlanRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementResult: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionClosePreparedStatementRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionBeginTransactionRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionBeginSavepointRequest: @unchecked Sendable {} -extension 
Arrow_Flight_Protocol_Sql_ActionBeginTransactionResult: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionBeginSavepointResult: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest.EndTransaction: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest.EndSavepoint: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandStatementQuery: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandStatementSubstraitPlan: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_TicketStatementQuery: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandPreparedStatementQuery: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandStatementUpdate: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_CommandPreparedStatementUpdate: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_DoPutUpdateResult: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionCancelQueryRequest: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionCancelQueryResult: @unchecked Sendable {} -extension Arrow_Flight_Protocol_Sql_ActionCancelQueryResult.CancelResult: @unchecked Sendable {} -#endif // swift(>=5.5) && canImport(_Concurrency) - -// MARK: - Extension support defined in FlightSql.proto. - -// MARK: - Extension Properties - -// Swift Extensions on the extended Messages to add easy access to the declared -// extension fields. The names are based on the extension field name from the proto -// declaration. To avoid naming collisions, the names are prefixed with the name of -// the scope where the extend directive occurs. - -extension SwiftProtobuf.Google_Protobuf_MessageOptions { - - var Arrow_Flight_Protocol_Sql_experimental: Bool { - get {return getExtensionValue(ext: Arrow_Flight_Protocol_Sql_Extensions_experimental) ?? false} - set {setExtensionValue(ext: Arrow_Flight_Protocol_Sql_Extensions_experimental, value: newValue)} - } - /// Returns true if extension `Arrow_Flight_Protocol_Sql_Extensions_experimental` - /// has been explicitly set. - var hasArrow_Flight_Protocol_Sql_experimental: Bool { - return hasExtensionValue(ext: Arrow_Flight_Protocol_Sql_Extensions_experimental) - } - /// Clears the value of extension `Arrow_Flight_Protocol_Sql_Extensions_experimental`. - /// Subsequent reads from it will return its default value. - mutating func clearArrow_Flight_Protocol_Sql_experimental() { - clearExtensionValue(ext: Arrow_Flight_Protocol_Sql_Extensions_experimental) - } - -} - -// MARK: - File's ExtensionMap: Arrow_Flight_Protocol_Sql_FlightSql_Extensions - -/// A `SwiftProtobuf.SimpleExtensionMap` that includes all of the extensions defined by -/// this .proto file. It can be used any place an `SwiftProtobuf.ExtensionMap` is needed -/// in parsing, or it can be combined with other `SwiftProtobuf.SimpleExtensionMap`s to create -/// a larger `SwiftProtobuf.SimpleExtensionMap`. -let Arrow_Flight_Protocol_Sql_FlightSql_Extensions: SwiftProtobuf.SimpleExtensionMap = [ - Arrow_Flight_Protocol_Sql_Extensions_experimental -] - -// Extension Objects - The only reason these might be needed is when manually -// constructing a `SimpleExtensionMap`, otherwise, use the above _Extension Properties_ -// accessors for the extension fields on the messages directly. 
- -let Arrow_Flight_Protocol_Sql_Extensions_experimental = SwiftProtobuf.MessageExtension, SwiftProtobuf.Google_Protobuf_MessageOptions>( - _protobuf_fieldNumber: 1000, - fieldName: "arrow.flight.protocol.sql.experimental" -) - -// MARK: - Code below here is support for the SwiftProtobuf runtime. - -fileprivate let _protobuf_package = "arrow.flight.protocol.sql" - -extension Arrow_Flight_Protocol_Sql_SqlInfo: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "FLIGHT_SQL_SERVER_NAME"), - 1: .same(proto: "FLIGHT_SQL_SERVER_VERSION"), - 2: .same(proto: "FLIGHT_SQL_SERVER_ARROW_VERSION"), - 3: .same(proto: "FLIGHT_SQL_SERVER_READ_ONLY"), - 4: .same(proto: "FLIGHT_SQL_SERVER_SQL"), - 5: .same(proto: "FLIGHT_SQL_SERVER_SUBSTRAIT"), - 6: .same(proto: "FLIGHT_SQL_SERVER_SUBSTRAIT_MIN_VERSION"), - 7: .same(proto: "FLIGHT_SQL_SERVER_SUBSTRAIT_MAX_VERSION"), - 8: .same(proto: "FLIGHT_SQL_SERVER_TRANSACTION"), - 9: .same(proto: "FLIGHT_SQL_SERVER_CANCEL"), - 100: .same(proto: "FLIGHT_SQL_SERVER_STATEMENT_TIMEOUT"), - 101: .same(proto: "FLIGHT_SQL_SERVER_TRANSACTION_TIMEOUT"), - 500: .same(proto: "SQL_DDL_CATALOG"), - 501: .same(proto: "SQL_DDL_SCHEMA"), - 502: .same(proto: "SQL_DDL_TABLE"), - 503: .same(proto: "SQL_IDENTIFIER_CASE"), - 504: .same(proto: "SQL_IDENTIFIER_QUOTE_CHAR"), - 505: .same(proto: "SQL_QUOTED_IDENTIFIER_CASE"), - 506: .same(proto: "SQL_ALL_TABLES_ARE_SELECTABLE"), - 507: .same(proto: "SQL_NULL_ORDERING"), - 508: .same(proto: "SQL_KEYWORDS"), - 509: .same(proto: "SQL_NUMERIC_FUNCTIONS"), - 510: .same(proto: "SQL_STRING_FUNCTIONS"), - 511: .same(proto: "SQL_SYSTEM_FUNCTIONS"), - 512: .same(proto: "SQL_DATETIME_FUNCTIONS"), - 513: .same(proto: "SQL_SEARCH_STRING_ESCAPE"), - 514: .same(proto: "SQL_EXTRA_NAME_CHARACTERS"), - 515: .same(proto: "SQL_SUPPORTS_COLUMN_ALIASING"), - 516: .same(proto: "SQL_NULL_PLUS_NULL_IS_NULL"), - 517: .same(proto: "SQL_SUPPORTS_CONVERT"), - 518: .same(proto: "SQL_SUPPORTS_TABLE_CORRELATION_NAMES"), - 519: .same(proto: "SQL_SUPPORTS_DIFFERENT_TABLE_CORRELATION_NAMES"), - 520: .same(proto: "SQL_SUPPORTS_EXPRESSIONS_IN_ORDER_BY"), - 521: .same(proto: "SQL_SUPPORTS_ORDER_BY_UNRELATED"), - 522: .same(proto: "SQL_SUPPORTED_GROUP_BY"), - 523: .same(proto: "SQL_SUPPORTS_LIKE_ESCAPE_CLAUSE"), - 524: .same(proto: "SQL_SUPPORTS_NON_NULLABLE_COLUMNS"), - 525: .same(proto: "SQL_SUPPORTED_GRAMMAR"), - 526: .same(proto: "SQL_ANSI92_SUPPORTED_LEVEL"), - 527: .same(proto: "SQL_SUPPORTS_INTEGRITY_ENHANCEMENT_FACILITY"), - 528: .same(proto: "SQL_OUTER_JOINS_SUPPORT_LEVEL"), - 529: .same(proto: "SQL_SCHEMA_TERM"), - 530: .same(proto: "SQL_PROCEDURE_TERM"), - 531: .same(proto: "SQL_CATALOG_TERM"), - 532: .same(proto: "SQL_CATALOG_AT_START"), - 533: .same(proto: "SQL_SCHEMAS_SUPPORTED_ACTIONS"), - 534: .same(proto: "SQL_CATALOGS_SUPPORTED_ACTIONS"), - 535: .same(proto: "SQL_SUPPORTED_POSITIONED_COMMANDS"), - 536: .same(proto: "SQL_SELECT_FOR_UPDATE_SUPPORTED"), - 537: .same(proto: "SQL_STORED_PROCEDURES_SUPPORTED"), - 538: .same(proto: "SQL_SUPPORTED_SUBQUERIES"), - 539: .same(proto: "SQL_CORRELATED_SUBQUERIES_SUPPORTED"), - 540: .same(proto: "SQL_SUPPORTED_UNIONS"), - 541: .same(proto: "SQL_MAX_BINARY_LITERAL_LENGTH"), - 542: .same(proto: "SQL_MAX_CHAR_LITERAL_LENGTH"), - 543: .same(proto: "SQL_MAX_COLUMN_NAME_LENGTH"), - 544: .same(proto: "SQL_MAX_COLUMNS_IN_GROUP_BY"), - 545: .same(proto: "SQL_MAX_COLUMNS_IN_INDEX"), - 546: .same(proto: "SQL_MAX_COLUMNS_IN_ORDER_BY"), - 547: .same(proto: 
"SQL_MAX_COLUMNS_IN_SELECT"), - 548: .same(proto: "SQL_MAX_COLUMNS_IN_TABLE"), - 549: .same(proto: "SQL_MAX_CONNECTIONS"), - 550: .same(proto: "SQL_MAX_CURSOR_NAME_LENGTH"), - 551: .same(proto: "SQL_MAX_INDEX_LENGTH"), - 552: .same(proto: "SQL_DB_SCHEMA_NAME_LENGTH"), - 553: .same(proto: "SQL_MAX_PROCEDURE_NAME_LENGTH"), - 554: .same(proto: "SQL_MAX_CATALOG_NAME_LENGTH"), - 555: .same(proto: "SQL_MAX_ROW_SIZE"), - 556: .same(proto: "SQL_MAX_ROW_SIZE_INCLUDES_BLOBS"), - 557: .same(proto: "SQL_MAX_STATEMENT_LENGTH"), - 558: .same(proto: "SQL_MAX_STATEMENTS"), - 559: .same(proto: "SQL_MAX_TABLE_NAME_LENGTH"), - 560: .same(proto: "SQL_MAX_TABLES_IN_SELECT"), - 561: .same(proto: "SQL_MAX_USERNAME_LENGTH"), - 562: .same(proto: "SQL_DEFAULT_TRANSACTION_ISOLATION"), - 563: .same(proto: "SQL_TRANSACTIONS_SUPPORTED"), - 564: .same(proto: "SQL_SUPPORTED_TRANSACTIONS_ISOLATION_LEVELS"), - 565: .same(proto: "SQL_DATA_DEFINITION_CAUSES_TRANSACTION_COMMIT"), - 566: .same(proto: "SQL_DATA_DEFINITIONS_IN_TRANSACTIONS_IGNORED"), - 567: .same(proto: "SQL_SUPPORTED_RESULT_SET_TYPES"), - 568: .same(proto: "SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_UNSPECIFIED"), - 569: .same(proto: "SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_FORWARD_ONLY"), - 570: .same(proto: "SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_SENSITIVE"), - 571: .same(proto: "SQL_SUPPORTED_CONCURRENCIES_FOR_RESULT_SET_SCROLL_INSENSITIVE"), - 572: .same(proto: "SQL_BATCH_UPDATES_SUPPORTED"), - 573: .same(proto: "SQL_SAVEPOINTS_SUPPORTED"), - 574: .same(proto: "SQL_NAMED_PARAMETERS_SUPPORTED"), - 575: .same(proto: "SQL_LOCATORS_UPDATE_COPY"), - 576: .same(proto: "SQL_STORED_FUNCTIONS_USING_CALL_SYNTAX_SUPPORTED"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedTransaction: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_SUPPORTED_TRANSACTION_NONE"), - 1: .same(proto: "SQL_SUPPORTED_TRANSACTION_TRANSACTION"), - 2: .same(proto: "SQL_SUPPORTED_TRANSACTION_SAVEPOINT"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedCaseSensitivity: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_CASE_SENSITIVITY_UNKNOWN"), - 1: .same(proto: "SQL_CASE_SENSITIVITY_CASE_INSENSITIVE"), - 2: .same(proto: "SQL_CASE_SENSITIVITY_UPPERCASE"), - 3: .same(proto: "SQL_CASE_SENSITIVITY_LOWERCASE"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlNullOrdering: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_NULLS_SORTED_HIGH"), - 1: .same(proto: "SQL_NULLS_SORTED_LOW"), - 2: .same(proto: "SQL_NULLS_SORTED_AT_START"), - 3: .same(proto: "SQL_NULLS_SORTED_AT_END"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SupportedSqlGrammar: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_MINIMUM_GRAMMAR"), - 1: .same(proto: "SQL_CORE_GRAMMAR"), - 2: .same(proto: "SQL_EXTENDED_GRAMMAR"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SupportedAnsi92SqlGrammarLevel: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "ANSI92_ENTRY_SQL"), - 1: .same(proto: "ANSI92_INTERMEDIATE_SQL"), - 2: .same(proto: "ANSI92_FULL_SQL"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlOuterJoinsSupportLevel: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_JOINS_UNSUPPORTED"), - 1: 
.same(proto: "SQL_LIMITED_OUTER_JOINS"), - 2: .same(proto: "SQL_FULL_OUTER_JOINS"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedGroupBy: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_GROUP_BY_UNRELATED"), - 1: .same(proto: "SQL_GROUP_BY_BEYOND_SELECT"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedElementActions: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_ELEMENT_IN_PROCEDURE_CALLS"), - 1: .same(proto: "SQL_ELEMENT_IN_INDEX_DEFINITIONS"), - 2: .same(proto: "SQL_ELEMENT_IN_PRIVILEGE_DEFINITIONS"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedPositionedCommands: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_POSITIONED_DELETE"), - 1: .same(proto: "SQL_POSITIONED_UPDATE"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedSubqueries: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_SUBQUERIES_IN_COMPARISONS"), - 1: .same(proto: "SQL_SUBQUERIES_IN_EXISTS"), - 2: .same(proto: "SQL_SUBQUERIES_IN_INS"), - 3: .same(proto: "SQL_SUBQUERIES_IN_QUANTIFIEDS"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedUnions: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_UNION"), - 1: .same(proto: "SQL_UNION_ALL"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlTransactionIsolationLevel: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_TRANSACTION_NONE"), - 1: .same(proto: "SQL_TRANSACTION_READ_UNCOMMITTED"), - 2: .same(proto: "SQL_TRANSACTION_READ_COMMITTED"), - 3: .same(proto: "SQL_TRANSACTION_REPEATABLE_READ"), - 4: .same(proto: "SQL_TRANSACTION_SERIALIZABLE"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedTransactions: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_TRANSACTION_UNSPECIFIED"), - 1: .same(proto: "SQL_DATA_DEFINITION_TRANSACTIONS"), - 2: .same(proto: "SQL_DATA_MANIPULATION_TRANSACTIONS"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedResultSetType: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_RESULT_SET_TYPE_UNSPECIFIED"), - 1: .same(proto: "SQL_RESULT_SET_TYPE_FORWARD_ONLY"), - 2: .same(proto: "SQL_RESULT_SET_TYPE_SCROLL_INSENSITIVE"), - 3: .same(proto: "SQL_RESULT_SET_TYPE_SCROLL_SENSITIVE"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportedResultSetConcurrency: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_RESULT_SET_CONCURRENCY_UNSPECIFIED"), - 1: .same(proto: "SQL_RESULT_SET_CONCURRENCY_READ_ONLY"), - 2: .same(proto: "SQL_RESULT_SET_CONCURRENCY_UPDATABLE"), - ] -} - -extension Arrow_Flight_Protocol_Sql_SqlSupportsConvert: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SQL_CONVERT_BIGINT"), - 1: .same(proto: "SQL_CONVERT_BINARY"), - 2: .same(proto: "SQL_CONVERT_BIT"), - 3: .same(proto: "SQL_CONVERT_CHAR"), - 4: .same(proto: "SQL_CONVERT_DATE"), - 5: .same(proto: "SQL_CONVERT_DECIMAL"), - 6: .same(proto: "SQL_CONVERT_FLOAT"), - 7: .same(proto: "SQL_CONVERT_INTEGER"), - 8: .same(proto: 
"SQL_CONVERT_INTERVAL_DAY_TIME"), - 9: .same(proto: "SQL_CONVERT_INTERVAL_YEAR_MONTH"), - 10: .same(proto: "SQL_CONVERT_LONGVARBINARY"), - 11: .same(proto: "SQL_CONVERT_LONGVARCHAR"), - 12: .same(proto: "SQL_CONVERT_NUMERIC"), - 13: .same(proto: "SQL_CONVERT_REAL"), - 14: .same(proto: "SQL_CONVERT_SMALLINT"), - 15: .same(proto: "SQL_CONVERT_TIME"), - 16: .same(proto: "SQL_CONVERT_TIMESTAMP"), - 17: .same(proto: "SQL_CONVERT_TINYINT"), - 18: .same(proto: "SQL_CONVERT_VARBINARY"), - 19: .same(proto: "SQL_CONVERT_VARCHAR"), - ] -} - -extension Arrow_Flight_Protocol_Sql_XdbcDataType: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - -9: .same(proto: "XDBC_WVARCHAR"), - -8: .same(proto: "XDBC_WCHAR"), - -7: .same(proto: "XDBC_BIT"), - -6: .same(proto: "XDBC_TINYINT"), - -5: .same(proto: "XDBC_BIGINT"), - -4: .same(proto: "XDBC_LONGVARBINARY"), - -3: .same(proto: "XDBC_VARBINARY"), - -2: .same(proto: "XDBC_BINARY"), - -1: .same(proto: "XDBC_LONGVARCHAR"), - 0: .same(proto: "XDBC_UNKNOWN_TYPE"), - 1: .same(proto: "XDBC_CHAR"), - 2: .same(proto: "XDBC_NUMERIC"), - 3: .same(proto: "XDBC_DECIMAL"), - 4: .same(proto: "XDBC_INTEGER"), - 5: .same(proto: "XDBC_SMALLINT"), - 6: .same(proto: "XDBC_FLOAT"), - 7: .same(proto: "XDBC_REAL"), - 8: .same(proto: "XDBC_DOUBLE"), - 9: .same(proto: "XDBC_DATETIME"), - 10: .same(proto: "XDBC_INTERVAL"), - 12: .same(proto: "XDBC_VARCHAR"), - 91: .same(proto: "XDBC_DATE"), - 92: .same(proto: "XDBC_TIME"), - 93: .same(proto: "XDBC_TIMESTAMP"), - ] -} - -extension Arrow_Flight_Protocol_Sql_XdbcDatetimeSubcode: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "XDBC_SUBCODE_UNKNOWN"), - 1: .aliased(proto: "XDBC_SUBCODE_YEAR", aliases: ["XDBC_SUBCODE_DATE"]), - 2: .aliased(proto: "XDBC_SUBCODE_TIME", aliases: ["XDBC_SUBCODE_MONTH"]), - 3: .aliased(proto: "XDBC_SUBCODE_TIMESTAMP", aliases: ["XDBC_SUBCODE_DAY"]), - 4: .aliased(proto: "XDBC_SUBCODE_TIME_WITH_TIMEZONE", aliases: ["XDBC_SUBCODE_HOUR"]), - 5: .aliased(proto: "XDBC_SUBCODE_TIMESTAMP_WITH_TIMEZONE", aliases: ["XDBC_SUBCODE_MINUTE"]), - 6: .same(proto: "XDBC_SUBCODE_SECOND"), - 7: .same(proto: "XDBC_SUBCODE_YEAR_TO_MONTH"), - 8: .same(proto: "XDBC_SUBCODE_DAY_TO_HOUR"), - 9: .same(proto: "XDBC_SUBCODE_DAY_TO_MINUTE"), - 10: .same(proto: "XDBC_SUBCODE_DAY_TO_SECOND"), - 11: .same(proto: "XDBC_SUBCODE_HOUR_TO_MINUTE"), - 12: .same(proto: "XDBC_SUBCODE_HOUR_TO_SECOND"), - 13: .same(proto: "XDBC_SUBCODE_MINUTE_TO_SECOND"), - 101: .same(proto: "XDBC_SUBCODE_INTERVAL_YEAR"), - 102: .same(proto: "XDBC_SUBCODE_INTERVAL_MONTH"), - 103: .same(proto: "XDBC_SUBCODE_INTERVAL_DAY"), - 104: .same(proto: "XDBC_SUBCODE_INTERVAL_HOUR"), - 105: .same(proto: "XDBC_SUBCODE_INTERVAL_MINUTE"), - 106: .same(proto: "XDBC_SUBCODE_INTERVAL_SECOND"), - 107: .same(proto: "XDBC_SUBCODE_INTERVAL_YEAR_TO_MONTH"), - 108: .same(proto: "XDBC_SUBCODE_INTERVAL_DAY_TO_HOUR"), - 109: .same(proto: "XDBC_SUBCODE_INTERVAL_DAY_TO_MINUTE"), - 110: .same(proto: "XDBC_SUBCODE_INTERVAL_DAY_TO_SECOND"), - 111: .same(proto: "XDBC_SUBCODE_INTERVAL_HOUR_TO_MINUTE"), - 112: .same(proto: "XDBC_SUBCODE_INTERVAL_HOUR_TO_SECOND"), - 113: .same(proto: "XDBC_SUBCODE_INTERVAL_MINUTE_TO_SECOND"), - ] -} - -extension Arrow_Flight_Protocol_Sql_Nullable: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "NULLABILITY_NO_NULLS"), - 1: .same(proto: "NULLABILITY_NULLABLE"), - 2: .same(proto: 
"NULLABILITY_UNKNOWN"), - ] -} - -extension Arrow_Flight_Protocol_Sql_Searchable: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "SEARCHABLE_NONE"), - 1: .same(proto: "SEARCHABLE_CHAR"), - 2: .same(proto: "SEARCHABLE_BASIC"), - 3: .same(proto: "SEARCHABLE_FULL"), - ] -} - -extension Arrow_Flight_Protocol_Sql_UpdateDeleteRules: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "CASCADE"), - 1: .same(proto: "RESTRICT"), - 2: .same(proto: "SET_NULL"), - 3: .same(proto: "NO_ACTION"), - 4: .same(proto: "SET_DEFAULT"), - ] -} - -extension Arrow_Flight_Protocol_Sql_CommandGetSqlInfo: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetSqlInfo" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "info"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeRepeatedUInt32Field(value: &self.info) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.info.isEmpty { - try visitor.visitPackedUInt32Field(value: self.info, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetSqlInfo, rhs: Arrow_Flight_Protocol_Sql_CommandGetSqlInfo) -> Bool { - if lhs.info != rhs.info {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetXdbcTypeInfo: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetXdbcTypeInfo" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "data_type"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularInt32Field(value: &self._dataType) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._dataType { - try visitor.visitSingularInt32Field(value: v, fieldNumber: 1) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetXdbcTypeInfo, rhs: Arrow_Flight_Protocol_Sql_CommandGetXdbcTypeInfo) -> Bool { - if lhs._dataType != rhs._dataType {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetCatalogs: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetCatalogs" - static let _protobuf_nameMap = SwiftProtobuf._NameMap() - - mutating func decodeMessage(decoder: inout D) throws { - while let _ = try decoder.nextFieldNumber() { - } - } - - func traverse(visitor: inout V) throws { - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetCatalogs, rhs: Arrow_Flight_Protocol_Sql_CommandGetCatalogs) -> Bool { - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetDbSchemas: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetDbSchemas" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "catalog"), - 2: .standard(proto: "db_schema_filter_pattern"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self._catalog) }() - case 2: try { try decoder.decodeSingularStringField(value: &self._dbSchemaFilterPattern) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._catalog { - try visitor.visitSingularStringField(value: v, fieldNumber: 1) - } }() - try { if let v = self._dbSchemaFilterPattern { - try visitor.visitSingularStringField(value: v, fieldNumber: 2) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetDbSchemas, rhs: Arrow_Flight_Protocol_Sql_CommandGetDbSchemas) -> Bool { - if lhs._catalog != rhs._catalog {return false} - if lhs._dbSchemaFilterPattern != rhs._dbSchemaFilterPattern {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetTables: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetTables" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "catalog"), - 2: .standard(proto: "db_schema_filter_pattern"), - 3: .standard(proto: "table_name_filter_pattern"), - 4: .standard(proto: "table_types"), - 5: .standard(proto: "include_schema"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self._catalog) }() - case 2: try { try decoder.decodeSingularStringField(value: &self._dbSchemaFilterPattern) }() - case 3: try { try decoder.decodeSingularStringField(value: &self._tableNameFilterPattern) }() - case 4: try { try decoder.decodeRepeatedStringField(value: &self.tableTypes) }() - case 5: try { try decoder.decodeSingularBoolField(value: &self.includeSchema) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._catalog { - try visitor.visitSingularStringField(value: v, fieldNumber: 1) - } }() - try { if let v = self._dbSchemaFilterPattern { - try visitor.visitSingularStringField(value: v, fieldNumber: 2) - } }() - try { if let v = self._tableNameFilterPattern { - try visitor.visitSingularStringField(value: v, fieldNumber: 3) - } }() - if !self.tableTypes.isEmpty { - try visitor.visitRepeatedStringField(value: self.tableTypes, fieldNumber: 4) - } - if self.includeSchema != false { - try visitor.visitSingularBoolField(value: self.includeSchema, fieldNumber: 5) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetTables, rhs: Arrow_Flight_Protocol_Sql_CommandGetTables) -> Bool { - if lhs._catalog != rhs._catalog {return false} - if lhs._dbSchemaFilterPattern != rhs._dbSchemaFilterPattern {return false} - if lhs._tableNameFilterPattern != rhs._tableNameFilterPattern {return false} - if lhs.tableTypes != rhs.tableTypes {return false} - if lhs.includeSchema != rhs.includeSchema {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetTableTypes: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetTableTypes" - static let _protobuf_nameMap = SwiftProtobuf._NameMap() - - mutating func decodeMessage(decoder: inout D) throws { - while let _ = try decoder.nextFieldNumber() { - } - } - - func traverse(visitor: inout V) throws { - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetTableTypes, rhs: Arrow_Flight_Protocol_Sql_CommandGetTableTypes) -> Bool { - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetPrimaryKeys: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetPrimaryKeys" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "catalog"), - 2: .standard(proto: "db_schema"), - 3: .same(proto: "table"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self._catalog) }() - case 2: try { try decoder.decodeSingularStringField(value: &self._dbSchema) }() - case 3: try { try decoder.decodeSingularStringField(value: &self.table) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._catalog { - try visitor.visitSingularStringField(value: v, fieldNumber: 1) - } }() - try { if let v = self._dbSchema { - try visitor.visitSingularStringField(value: v, fieldNumber: 2) - } }() - if !self.table.isEmpty { - try visitor.visitSingularStringField(value: self.table, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetPrimaryKeys, rhs: Arrow_Flight_Protocol_Sql_CommandGetPrimaryKeys) -> Bool { - if lhs._catalog != rhs._catalog {return false} - if lhs._dbSchema != rhs._dbSchema {return false} - if lhs.table != rhs.table {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetExportedKeys: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetExportedKeys" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "catalog"), - 2: .standard(proto: "db_schema"), - 3: .same(proto: "table"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self._catalog) }() - case 2: try { try decoder.decodeSingularStringField(value: &self._dbSchema) }() - case 3: try { try decoder.decodeSingularStringField(value: &self.table) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._catalog { - try visitor.visitSingularStringField(value: v, fieldNumber: 1) - } }() - try { if let v = self._dbSchema { - try visitor.visitSingularStringField(value: v, fieldNumber: 2) - } }() - if !self.table.isEmpty { - try visitor.visitSingularStringField(value: self.table, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetExportedKeys, rhs: Arrow_Flight_Protocol_Sql_CommandGetExportedKeys) -> Bool { - if lhs._catalog != rhs._catalog {return false} - if lhs._dbSchema != rhs._dbSchema {return false} - if lhs.table != rhs.table {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetImportedKeys: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetImportedKeys" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "catalog"), - 2: .standard(proto: "db_schema"), - 3: .same(proto: "table"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self._catalog) }() - case 2: try { try decoder.decodeSingularStringField(value: &self._dbSchema) }() - case 3: try { try decoder.decodeSingularStringField(value: &self.table) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._catalog { - try visitor.visitSingularStringField(value: v, fieldNumber: 1) - } }() - try { if let v = self._dbSchema { - try visitor.visitSingularStringField(value: v, fieldNumber: 2) - } }() - if !self.table.isEmpty { - try visitor.visitSingularStringField(value: self.table, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetImportedKeys, rhs: Arrow_Flight_Protocol_Sql_CommandGetImportedKeys) -> Bool { - if lhs._catalog != rhs._catalog {return false} - if lhs._dbSchema != rhs._dbSchema {return false} - if lhs.table != rhs.table {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandGetCrossReference: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandGetCrossReference" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "pk_catalog"), - 2: .standard(proto: "pk_db_schema"), - 3: .standard(proto: "pk_table"), - 4: .standard(proto: "fk_catalog"), - 5: .standard(proto: "fk_db_schema"), - 6: .standard(proto: "fk_table"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self._pkCatalog) }() - case 2: try { try decoder.decodeSingularStringField(value: &self._pkDbSchema) }() - case 3: try { try decoder.decodeSingularStringField(value: &self.pkTable) }() - case 4: try { try decoder.decodeSingularStringField(value: &self._fkCatalog) }() - case 5: try { try decoder.decodeSingularStringField(value: &self._fkDbSchema) }() - case 6: try { try decoder.decodeSingularStringField(value: &self.fkTable) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._pkCatalog { - try visitor.visitSingularStringField(value: v, fieldNumber: 1) - } }() - try { if let v = self._pkDbSchema { - try visitor.visitSingularStringField(value: v, fieldNumber: 2) - } }() - if !self.pkTable.isEmpty { - try visitor.visitSingularStringField(value: self.pkTable, fieldNumber: 3) - } - try { if let v = self._fkCatalog { - try visitor.visitSingularStringField(value: v, fieldNumber: 4) - } }() - try { if let v = self._fkDbSchema { - try visitor.visitSingularStringField(value: v, fieldNumber: 5) - } }() - if !self.fkTable.isEmpty { - try visitor.visitSingularStringField(value: self.fkTable, fieldNumber: 6) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandGetCrossReference, rhs: Arrow_Flight_Protocol_Sql_CommandGetCrossReference) -> Bool { - if lhs._pkCatalog != rhs._pkCatalog {return false} - if lhs._pkDbSchema != rhs._pkDbSchema {return false} - if lhs.pkTable != rhs.pkTable {return false} - if lhs._fkCatalog != rhs._fkCatalog {return false} - if lhs._fkDbSchema != rhs._fkDbSchema {return false} - if lhs.fkTable != rhs.fkTable {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionCreatePreparedStatementRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "query"), - 2: .standard(proto: "transaction_id"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.query) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self._transactionID) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - if !self.query.isEmpty { - try visitor.visitSingularStringField(value: self.query, fieldNumber: 1) - } - try { if let v = self._transactionID { - try visitor.visitSingularBytesField(value: v, fieldNumber: 2) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementRequest, rhs: Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementRequest) -> Bool { - if lhs.query != rhs.query {return false} - if lhs._transactionID != rhs._transactionID {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_SubstraitPlan: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".SubstraitPlan" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "plan"), - 2: .same(proto: "version"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.plan) }() - case 2: try { try decoder.decodeSingularStringField(value: &self.version) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.plan.isEmpty { - try visitor.visitSingularBytesField(value: self.plan, fieldNumber: 1) - } - if !self.version.isEmpty { - try visitor.visitSingularStringField(value: self.version, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_SubstraitPlan, rhs: Arrow_Flight_Protocol_Sql_SubstraitPlan) -> Bool { - if lhs.plan != rhs.plan {return false} - if lhs.version != rhs.version {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionCreatePreparedSubstraitPlanRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionCreatePreparedSubstraitPlanRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "plan"), - 2: .standard(proto: "transaction_id"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularMessageField(value: &self._plan) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self._transactionID) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._plan { - try visitor.visitSingularMessageField(value: v, fieldNumber: 1) - } }() - try { if let v = self._transactionID { - try visitor.visitSingularBytesField(value: v, fieldNumber: 2) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionCreatePreparedSubstraitPlanRequest, rhs: Arrow_Flight_Protocol_Sql_ActionCreatePreparedSubstraitPlanRequest) -> Bool { - if lhs._plan != rhs._plan {return false} - if lhs._transactionID != rhs._transactionID {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionCreatePreparedStatementResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "prepared_statement_handle"), - 2: .standard(proto: "dataset_schema"), - 3: .standard(proto: "parameter_schema"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.preparedStatementHandle) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self.datasetSchema) }() - case 3: try { try decoder.decodeSingularBytesField(value: &self.parameterSchema) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.preparedStatementHandle.isEmpty { - try visitor.visitSingularBytesField(value: self.preparedStatementHandle, fieldNumber: 1) - } - if !self.datasetSchema.isEmpty { - try visitor.visitSingularBytesField(value: self.datasetSchema, fieldNumber: 2) - } - if !self.parameterSchema.isEmpty { - try visitor.visitSingularBytesField(value: self.parameterSchema, fieldNumber: 3) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementResult, rhs: Arrow_Flight_Protocol_Sql_ActionCreatePreparedStatementResult) -> Bool { - if lhs.preparedStatementHandle != rhs.preparedStatementHandle {return false} - if lhs.datasetSchema != rhs.datasetSchema {return false} - if lhs.parameterSchema != rhs.parameterSchema {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionClosePreparedStatementRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionClosePreparedStatementRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "prepared_statement_handle"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.preparedStatementHandle) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.preparedStatementHandle.isEmpty { - try visitor.visitSingularBytesField(value: self.preparedStatementHandle, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionClosePreparedStatementRequest, rhs: Arrow_Flight_Protocol_Sql_ActionClosePreparedStatementRequest) -> Bool { - if lhs.preparedStatementHandle != rhs.preparedStatementHandle {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionBeginTransactionRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionBeginTransactionRequest" - static let _protobuf_nameMap = SwiftProtobuf._NameMap() - - mutating func decodeMessage(decoder: inout D) throws { - while let _ = try decoder.nextFieldNumber() { - } - } - - func traverse(visitor: inout V) throws { - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionBeginTransactionRequest, rhs: Arrow_Flight_Protocol_Sql_ActionBeginTransactionRequest) -> Bool { - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionBeginSavepointRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionBeginSavepointRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "transaction_id"), - 2: .same(proto: "name"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.transactionID) }() - case 2: try { try decoder.decodeSingularStringField(value: &self.name) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.transactionID.isEmpty { - try visitor.visitSingularBytesField(value: self.transactionID, fieldNumber: 1) - } - if !self.name.isEmpty { - try visitor.visitSingularStringField(value: self.name, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionBeginSavepointRequest, rhs: Arrow_Flight_Protocol_Sql_ActionBeginSavepointRequest) -> Bool { - if lhs.transactionID != rhs.transactionID {return false} - if lhs.name != rhs.name {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionBeginTransactionResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionBeginTransactionResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "transaction_id"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.transactionID) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.transactionID.isEmpty { - try visitor.visitSingularBytesField(value: self.transactionID, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionBeginTransactionResult, rhs: Arrow_Flight_Protocol_Sql_ActionBeginTransactionResult) -> Bool { - if lhs.transactionID != rhs.transactionID {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionBeginSavepointResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionBeginSavepointResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "savepoint_id"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.savepointID) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.savepointID.isEmpty { - try visitor.visitSingularBytesField(value: self.savepointID, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionBeginSavepointResult, rhs: Arrow_Flight_Protocol_Sql_ActionBeginSavepointResult) -> Bool { - if lhs.savepointID != rhs.savepointID {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionEndTransactionRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "transaction_id"), - 2: .same(proto: "action"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.transactionID) }() - case 2: try { try decoder.decodeSingularEnumField(value: &self.action) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.transactionID.isEmpty { - try visitor.visitSingularBytesField(value: self.transactionID, fieldNumber: 1) - } - if self.action != .unspecified { - try visitor.visitSingularEnumField(value: self.action, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest, rhs: Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest) -> Bool { - if lhs.transactionID != rhs.transactionID {return false} - if lhs.action != rhs.action {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionEndTransactionRequest.EndTransaction: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "END_TRANSACTION_UNSPECIFIED"), - 1: .same(proto: "END_TRANSACTION_COMMIT"), - 2: .same(proto: "END_TRANSACTION_ROLLBACK"), - ] -} - -extension Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionEndSavepointRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "savepoint_id"), - 2: .same(proto: "action"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.savepointID) }() - case 2: try { try decoder.decodeSingularEnumField(value: &self.action) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.savepointID.isEmpty { - try visitor.visitSingularBytesField(value: self.savepointID, fieldNumber: 1) - } - if self.action != .unspecified { - try visitor.visitSingularEnumField(value: self.action, fieldNumber: 2) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest, rhs: Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest) -> Bool { - if lhs.savepointID != rhs.savepointID {return false} - if lhs.action != rhs.action {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionEndSavepointRequest.EndSavepoint: SwiftProtobuf._ProtoNameProviding { - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 0: .same(proto: "END_SAVEPOINT_UNSPECIFIED"), - 1: .same(proto: "END_SAVEPOINT_RELEASE"), - 2: .same(proto: "END_SAVEPOINT_ROLLBACK"), - ] -} - -extension Arrow_Flight_Protocol_Sql_CommandStatementQuery: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandStatementQuery" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "query"), - 2: .standard(proto: "transaction_id"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.query) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self._transactionID) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - if !self.query.isEmpty { - try visitor.visitSingularStringField(value: self.query, fieldNumber: 1) - } - try { if let v = self._transactionID { - try visitor.visitSingularBytesField(value: v, fieldNumber: 2) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandStatementQuery, rhs: Arrow_Flight_Protocol_Sql_CommandStatementQuery) -> Bool { - if lhs.query != rhs.query {return false} - if lhs._transactionID != rhs._transactionID {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandStatementSubstraitPlan: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandStatementSubstraitPlan" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "plan"), - 2: .standard(proto: "transaction_id"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularMessageField(value: &self._plan) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self._transactionID) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - try { if let v = self._plan { - try visitor.visitSingularMessageField(value: v, fieldNumber: 1) - } }() - try { if let v = self._transactionID { - try visitor.visitSingularBytesField(value: v, fieldNumber: 2) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandStatementSubstraitPlan, rhs: Arrow_Flight_Protocol_Sql_CommandStatementSubstraitPlan) -> Bool { - if lhs._plan != rhs._plan {return false} - if lhs._transactionID != rhs._transactionID {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_TicketStatementQuery: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".TicketStatementQuery" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "statement_handle"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.statementHandle) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.statementHandle.isEmpty { - try visitor.visitSingularBytesField(value: self.statementHandle, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_TicketStatementQuery, rhs: Arrow_Flight_Protocol_Sql_TicketStatementQuery) -> Bool { - if lhs.statementHandle != rhs.statementHandle {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandPreparedStatementQuery: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandPreparedStatementQuery" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "prepared_statement_handle"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.preparedStatementHandle) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.preparedStatementHandle.isEmpty { - try visitor.visitSingularBytesField(value: self.preparedStatementHandle, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandPreparedStatementQuery, rhs: Arrow_Flight_Protocol_Sql_CommandPreparedStatementQuery) -> Bool { - if lhs.preparedStatementHandle != rhs.preparedStatementHandle {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandStatementUpdate: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandStatementUpdate" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "query"), - 2: .standard(proto: "transaction_id"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularStringField(value: &self.query) }() - case 2: try { try decoder.decodeSingularBytesField(value: &self._transactionID) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every if/case branch local when no optimizations - // are enabled. 
https://github.com/apple/swift-protobuf/issues/1034 and - // https://github.com/apple/swift-protobuf/issues/1182 - if !self.query.isEmpty { - try visitor.visitSingularStringField(value: self.query, fieldNumber: 1) - } - try { if let v = self._transactionID { - try visitor.visitSingularBytesField(value: v, fieldNumber: 2) - } }() - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandStatementUpdate, rhs: Arrow_Flight_Protocol_Sql_CommandStatementUpdate) -> Bool { - if lhs.query != rhs.query {return false} - if lhs._transactionID != rhs._transactionID {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_CommandPreparedStatementUpdate: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".CommandPreparedStatementUpdate" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "prepared_statement_handle"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.preparedStatementHandle) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.preparedStatementHandle.isEmpty { - try visitor.visitSingularBytesField(value: self.preparedStatementHandle, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_CommandPreparedStatementUpdate, rhs: Arrow_Flight_Protocol_Sql_CommandPreparedStatementUpdate) -> Bool { - if lhs.preparedStatementHandle != rhs.preparedStatementHandle {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_DoPutUpdateResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".DoPutUpdateResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .standard(proto: "record_count"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularInt64Field(value: &self.recordCount) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if self.recordCount != 0 { - try visitor.visitSingularInt64Field(value: self.recordCount, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_DoPutUpdateResult, rhs: Arrow_Flight_Protocol_Sql_DoPutUpdateResult) -> Bool { - if lhs.recordCount != rhs.recordCount {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionCancelQueryRequest: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionCancelQueryRequest" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "info"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. https://github.com/apple/swift-protobuf/issues/1034 - switch fieldNumber { - case 1: try { try decoder.decodeSingularBytesField(value: &self.info) }() - default: break - } - } - } - - func traverse(visitor: inout V) throws { - if !self.info.isEmpty { - try visitor.visitSingularBytesField(value: self.info, fieldNumber: 1) - } - try unknownFields.traverse(visitor: &visitor) - } - - static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionCancelQueryRequest, rhs: Arrow_Flight_Protocol_Sql_ActionCancelQueryRequest) -> Bool { - if lhs.info != rhs.info {return false} - if lhs.unknownFields != rhs.unknownFields {return false} - return true - } -} - -extension Arrow_Flight_Protocol_Sql_ActionCancelQueryResult: SwiftProtobuf.Message, SwiftProtobuf._MessageImplementationBase, SwiftProtobuf._ProtoNameProviding { - static let protoMessageName: String = _protobuf_package + ".ActionCancelQueryResult" - static let _protobuf_nameMap: SwiftProtobuf._NameMap = [ - 1: .same(proto: "result"), - ] - - mutating func decodeMessage(decoder: inout D) throws { - while let fieldNumber = try decoder.nextFieldNumber() { - // The use of inline closures is to circumvent an issue where the compiler - // allocates stack space for every case branch when no optimizations are - // enabled. 
https://github.com/apple/swift-protobuf/issues/1034
-      switch fieldNumber {
-      case 1: try { try decoder.decodeSingularEnumField(value: &self.result) }()
-      default: break
-      }
-    }
-  }
-
-  func traverse<V: SwiftProtobuf.Visitor>(visitor: inout V) throws {
-    if self.result != .unspecified {
-      try visitor.visitSingularEnumField(value: self.result, fieldNumber: 1)
-    }
-    try unknownFields.traverse(visitor: &visitor)
-  }
-
-  static func ==(lhs: Arrow_Flight_Protocol_Sql_ActionCancelQueryResult, rhs: Arrow_Flight_Protocol_Sql_ActionCancelQueryResult) -> Bool {
-    if lhs.result != rhs.result {return false}
-    if lhs.unknownFields != rhs.unknownFields {return false}
-    return true
-  }
-}
-
-extension Arrow_Flight_Protocol_Sql_ActionCancelQueryResult.CancelResult: SwiftProtobuf._ProtoNameProviding {
-  static let _protobuf_nameMap: SwiftProtobuf._NameMap = [
-    0: .same(proto: "CANCEL_RESULT_UNSPECIFIED"),
-    1: .same(proto: "CANCEL_RESULT_CANCELLED"),
-    2: .same(proto: "CANCEL_RESULT_CANCELLING"),
-    3: .same(proto: "CANCEL_RESULT_NOT_CANCELLABLE"),
-  ]
-}
diff --git a/swift/ArrowFlight/Sources/ArrowFlight/FlightTicket.swift b/swift/ArrowFlight/Sources/ArrowFlight/FlightTicket.swift
deleted file mode 100644
index ab3cb321c62..00000000000
--- a/swift/ArrowFlight/Sources/ArrowFlight/FlightTicket.swift
+++ /dev/null
@@ -1,35 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import Foundation
-
-public class FlightTicket {
-    public let data: Data
-    init(_ ticket: Arrow_Flight_Protocol_Ticket) {
-        self.data = ticket.ticket
-    }
-
-    public init(_ data: Data) {
-        self.data = data
-    }
-
-    func toProtocol() -> Arrow_Flight_Protocol_Ticket {
-        var ticket = Arrow_Flight_Protocol_Ticket()
-        ticket.ticket = self.data
-        return ticket
-    }
-}
diff --git a/swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamReader.swift b/swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamReader.swift
deleted file mode 100644
index 464752dbcbe..00000000000
--- a/swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamReader.swift
+++ /dev/null
@@ -1,81 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import Foundation
-import Arrow
-import GRPC
-
-public class RecordBatchStreamReader: AsyncSequence, AsyncIteratorProtocol {
-    public typealias AsyncIterator = RecordBatchStreamReader
-    public typealias Element = (Arrow.RecordBatch?, FlightDescriptor?)
-    let reader = ArrowReader()
-    var batches = [RecordBatch]()
-    var descriptor: FlightDescriptor?
-    var batchIndex = 0
-    var streamIterator: any AsyncIteratorProtocol
-    var useUnalignedBuffers: Bool
-    let stream: GRPC.GRPCAsyncRequestStream<Arrow_Flight_Protocol_FlightData>
-    init(_ stream: GRPC.GRPCAsyncRequestStream<Arrow_Flight_Protocol_FlightData>,
-         useUnalignedBuffers: Bool = false) {
-        self.stream = stream
-        self.streamIterator = self.stream.makeAsyncIterator()
-        self.useUnalignedBuffers = useUnalignedBuffers
-    }
-
-    public func next() async throws -> (Arrow.RecordBatch?, FlightDescriptor?)? {
-        guard !Task.isCancelled else {
-            return nil
-        }
-
-        if batchIndex < batches.count {
-            let batch = batches[batchIndex]
-            batchIndex += 1
-            return (batch, descriptor)
-        }
-
-        let result = ArrowReader.makeArrowReaderResult()
-        while true {
-            let streamData = try await self.streamIterator.next()
-            if streamData == nil {
-                return nil
-            }
-
-            let flightData = (streamData as? Arrow_Flight_Protocol_FlightData)!
-            let dataBody = flightData.dataBody
-            let dataHeader = flightData.dataHeader
-            descriptor = FlightDescriptor(flightData.flightDescriptor)
-            switch reader.fromMessage(
-                dataHeader,
-                dataBody: dataBody,
-                result: result,
-                useUnalignedBuffers: useUnalignedBuffers) {
-            case .success(()):
-                if result.batches.count > 0 {
-                    batches = result.batches
-                    batchIndex = 1
-                    return (batches[0], descriptor)
-                }
-            case .failure(let error):
-                throw error
-            }
-        }
-    }
-
-    public func makeAsyncIterator() -> RecordBatchStreamReader {
-        self
-    }
-}
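The reader above conforms to AsyncSequence, so callers can drain it with plain for-try-await iteration; a minimal consumption sketch, assuming the Arrow and ArrowFlight modules as they stood before this deletion (collectBatches is a hypothetical helper, not part of the sources):

    import Arrow
    import ArrowFlight

    // Drain a RecordBatchStreamReader into an array. Each element pairs an
    // optional RecordBatch with the optional FlightDescriptor captured from
    // the first FlightData message; iteration ends when next() returns nil.
    func collectBatches(_ reader: RecordBatchStreamReader) async throws -> [RecordBatch] {
        var collected = [RecordBatch]()
        for try await (batch, _) in reader {
            if let batch {
                collected.append(batch)
            }
        }
        return collected
    }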
diff --git a/swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamWriter.swift b/swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamWriter.swift
deleted file mode 100644
index d3e03fe17ce..00000000000
--- a/swift/ArrowFlight/Sources/ArrowFlight/RecordBatchStreamWriter.swift
+++ /dev/null
@@ -1,96 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
-
-import Foundation
-import Arrow
-import GRPC
-
-public class ActionTypeStreamWriter {
-    let stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_ActionType>
-    init(_ stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_ActionType>) {
-        self.stream = stream
-    }
-
-    public func write(_ actionType: FlightActionType) async throws {
-        try await self.stream.send(actionType.toProtocol())
-    }
-}
-
-public class ResultStreamWriter {
-    let stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_Result>
-    init(_ stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_Result>) {
-        self.stream = stream
-    }
-
-    public func write(_ result: FlightResult) async throws {
-        try await self.stream.send(result.toProtocol())
-    }
-}
-
-public class FlightInfoStreamWriter {
-    let stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_FlightInfo>
-    init(_ stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_FlightInfo>) {
-        self.stream = stream
-    }
-
-    public func write(_ result: FlightInfo) async throws {
-        try await self.stream.send(result.toProtocol())
-    }
-}
-
-public class PutResultDataStreamWriter {
-    let stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_PutResult>
-    init(_ stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_PutResult>) {
-        self.stream = stream
-    }
-
-    public func write(_ result: FlightPutResult) async throws {
-        try await self.stream.send(result.toProtocol())
-    }
-}
-
-public class RecordBatchStreamWriter {
-    let writer = ArrowWriter()
-    let stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_FlightData>
-    init(_ stream: GRPCAsyncResponseStreamWriter<Arrow_Flight_Protocol_FlightData>) {
-        self.stream = stream
-    }
-
-    public func write(_ rb: RecordBatch) async throws {
-        switch writer.toMessage(rb.schema) {
-        case .success(let schemaData):
-            let schemaFlightData = Arrow_Flight_Protocol_FlightData.with {
-                $0.dataHeader = schemaData
-            }
-
-            try await self.stream.send(schemaFlightData)
-            switch writer.toMessage(rb) {
-            case .success(let recordMessages):
-                let rbMessage = Arrow_Flight_Protocol_FlightData.with {
-                    $0.dataHeader = recordMessages[0]
-                    $0.dataBody = recordMessages[1]
-                }
-
-                try await self.stream.send(rbMessage)
-            case .failure(let error):
-                throw error
-            }
-        case .failure(let error):
-            throw error
-        }
-    }
-}
diff --git a/swift/ArrowFlight/Tests/ArrowFlightTests/FlightTest.swift b/swift/ArrowFlight/Tests/ArrowFlightTests/FlightTest.swift
deleted file mode 100644
index f7bc3c1ccb0..00000000000
--- a/swift/ArrowFlight/Tests/ArrowFlightTests/FlightTest.swift
+++ /dev/null
@@ -1,357 +0,0 @@
-// Licensed to the Apache Software Foundation (ASF) under one
-// or more contributor license agreements. See the NOTICE file
-// distributed with this work for additional information
-// regarding copyright ownership. The ASF licenses this file
-// to you under the Apache License, Version 2.0 (the
-// "License"); you may not use this file except in compliance
-// with the License. You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing,
-// software distributed under the License is distributed on an
-// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-// KIND, either express or implied. See the License for the
-// specific language governing permissions and limitations
-// under the License.
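RecordBatchStreamWriter.write(_:) above frames every batch as two FlightData messages: a schema-only message, then a header-plus-body message. A distilled sketch of that framing, assuming ArrowWriter.toMessage behaves as in the deleted sources, where toMessage(schema) yields one header payload and toMessage(batch) yields [header, body] (frame is a hypothetical helper):

    import Arrow

    // Build the two FlightData messages that carry one RecordBatch on the wire.
    func frame(_ batch: RecordBatch, writer: ArrowWriter) throws -> [Arrow_Flight_Protocol_FlightData] {
        switch writer.toMessage(batch.schema) {
        case .success(let schemaData):
            switch writer.toMessage(batch) {
            case .success(let recordMessages):
                // First message: schema header only, no body.
                let schemaMessage = Arrow_Flight_Protocol_FlightData.with {
                    $0.dataHeader = schemaData
                }
                // Second message: record-batch header plus buffer body.
                let batchMessage = Arrow_Flight_Protocol_FlightData.with {
                    $0.dataHeader = recordMessages[0]
                    $0.dataBody = recordMessages[1]
                }
                return [schemaMessage, batchMessage]
            case .failure(let error):
                throw error
            }
        case .failure(let error):
            throw error
        }
    }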
- -import XCTest -import struct Foundation.Data -import struct Foundation.URL -import GRPC -import NIOCore -import NIOPosix -import Arrow - -@testable import ArrowFlight - -func makeSchema() -> ArrowSchema { - let schemaBuilder = ArrowSchema.Builder() - return schemaBuilder.addField("col1", type: ArrowType(ArrowType.ArrowDouble), isNullable: true) - .addField("col2", type: ArrowType(ArrowType.ArrowString), isNullable: false) - .addField("col3", type: ArrowType(ArrowType.ArrowDate32), isNullable: false) - .finish() -} - -func makeRecordBatch() throws -> RecordBatch { - let doubleBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - doubleBuilder.append(11.11) - doubleBuilder.append(22.22) - doubleBuilder.append(33.33) - doubleBuilder.append(44.44) - let stringBuilder = try ArrowArrayBuilders.loadStringArrayBuilder() - stringBuilder.append("test10") - stringBuilder.append("test22") - stringBuilder.append("test33") - stringBuilder.append("test44") - let date32Builder = try ArrowArrayBuilders.loadDate32ArrayBuilder() - let date2 = Date(timeIntervalSinceReferenceDate: 86400 * 1) - let date1 = Date(timeIntervalSinceReferenceDate: 86400 * 5000 + 352) - date32Builder.append(date1) - date32Builder.append(date2) - date32Builder.append(date1) - date32Builder.append(date2) - let doubleHolder = ArrowArrayHolderImpl(try doubleBuilder.finish()) - let stringHolder = ArrowArrayHolderImpl(try stringBuilder.finish()) - let date32Holder = ArrowArrayHolderImpl(try date32Builder.finish()) - let result = RecordBatch.Builder() - .addColumn("col1", arrowArray: doubleHolder) - .addColumn("col2", arrowArray: stringHolder) - .addColumn("col3", arrowArray: date32Holder) - .finish() - switch result { - case .success(let recordBatch): - return recordBatch - case .failure(let error): - throw error - } -} - -var flights = [String: FlightInfo]() -final class MyFlightServer: ArrowFlightServer { - func doExchange( - _ reader: ArrowFlight.RecordBatchStreamReader, - writer: ArrowFlight.RecordBatchStreamWriter) async throws { - do { - for try await rbData in reader { - let rb = rbData.0! - XCTAssertEqual(rb.schema.fields.count, 3) - XCTAssertEqual(rb.length, 4) - } - - let rb = try makeRecordBatch() - try await writer.write(rb) - } catch { - print("Unknown error: \(error)") - } - } - - func doPut( - _ reader: ArrowFlight.RecordBatchStreamReader, - writer: ArrowFlight.PutResultDataStreamWriter) async throws { - for try await rbData in reader { - let rb = rbData.0! 
- let key = String(decoding: rbData.1!.cmd, as: UTF8.self) - flights[key] = try FlightInfo(schemaToMessage(rb.schema), endpoints: [], descriptor: rbData.1) - XCTAssertEqual(rb.schema.fields.count, 3) - XCTAssertEqual(rb.length, 4) - try await writer.write(FlightPutResult()) - } - } - - func doGet(_ ticket: ArrowFlight.FlightTicket, writer: ArrowFlight.RecordBatchStreamWriter) async throws { - try await writer.write(try makeRecordBatch()) - } - - func getSchema(_ request: ArrowFlight.FlightDescriptor) async throws -> ArrowFlight.FlightSchemaResult { - XCTAssertEqual(String(bytes: request.cmd, encoding: .utf8)!, "schema info") - XCTAssertEqual(request.type, .cmd) - return try ArrowFlight.FlightSchemaResult(schemaToMessage(makeSchema())) - } - - func getFlightInfo(_ request: ArrowFlight.FlightDescriptor) async throws -> ArrowFlight.FlightInfo { - let key = String(decoding: request.cmd, as: UTF8.self) - if flights[key] != nil { - return ArrowFlight.FlightInfo(flights[key]!.toProtocol()) - } - - throw ArrowFlightError.ioError("Flight not found") - } - - func listFlights(_ criteria: ArrowFlight.FlightCriteria, writer: ArrowFlight.FlightInfoStreamWriter) async throws { - XCTAssertEqual(String(bytes: criteria.expression, encoding: .utf8), "flight criteria expression") - for flightData in flights { - try await writer.write(flightData.value) - } - } - - func listActions(_ writer: ArrowFlight.ActionTypeStreamWriter) async throws { - try await writer.write(FlightActionType("clear", description: "Clear the stored flights.")) - try await writer.write(FlightActionType("shutdown", description: "Shut down this server.")) - } - - func doAction(_ action: FlightAction, writer: ResultStreamWriter) async throws { - XCTAssertEqual(action.type, "healthcheck") - XCTAssertEqual(String(bytes: action.body, encoding: .utf8)!, "healthcheck body") - try await writer.write(FlightResult("test_action result".data(using: .utf8)!)) - } -} - -struct FlightServerImpl { - var port = 1234 - static var server: Server? - static var group: MultiThreadedEventLoopGroup? - static func run() async throws { - do { - // Create an event loop group for the server to run on. - let group = MultiThreadedEventLoopGroup(numberOfThreads: System.coreCount) - // Create a provider using the features we read. - let provider = ArrowFlight.makeFlightServer(MyFlightServer()) - - // Start the server and print its address once it has started. - FlightServerImpl.server = try await Server.insecure(group: group) - .withServiceProviders([provider]) - .bind(host: "localhost", port: 8088) - .get() - - print("server started on port \(server!.channel.localAddress!.port!)") - // Wait on the server's `onClose` future to stop the program from exiting. - } catch { - print("Unknown server error: \(error)") - } - } -} - -public class FlightClientTester { - var client: FlightClient? - var group: MultiThreadedEventLoopGroup? - var channel: GRPCChannel? - - init() async throws { - // Load the features. - let group = PlatformSupport.makeEventLoopGroup(loopCount: 1) - let channel = try GRPCChannelPool.with( - target: .host("localhost", port: 8088), - transportSecurity: .plaintext, - eventLoopGroup: group - ) - - client = FlightClient(channel: channel) - } - - deinit { - try? group?.syncShutdownGracefully() - try? 
channel?.close().wait() - } - - func listActionTest() async throws { - var actionTypes = [FlightActionType]() - try await client?.listActions({ action in - actionTypes.append(action) - }) - - XCTAssertEqual(actionTypes.count, 2) - - XCTAssertEqual(actionTypes[0].type, "clear") - XCTAssertEqual(actionTypes[0].description, "Clear the stored flights.") - XCTAssertEqual(actionTypes[1].type, "shutdown") - XCTAssertEqual(actionTypes[1].description, "Shut down this server.") - } - - func listFlightsTest() async throws { - let flightCriteria = FlightCriteria("flight criteria expression".data(using: .utf8)!) - var numCalls = 0 - try await client?.listFlights(flightCriteria, closure: { data in - if let schema = data.schema { - XCTAssertGreaterThanOrEqual(schema.fields.count, 0) - numCalls += 1 - } - }) - - XCTAssertEqual(numCalls, 2) - } - - func doActionTest(_ type: String, actionBody: Data) async throws { - let action = FlightAction(type, body: actionBody) - var actionResults = [FlightResult]() - try await client?.doAction(action, closure: { result in - actionResults.append(result) - }) - - XCTAssertEqual(actionResults.count, 1) - XCTAssertEqual(String(bytes: actionResults[0].body, encoding: .utf8), "test_action result") - } - - func getSchemaTest(_ cmd: Data) async throws { - let descriptor = FlightDescriptor(cmd: cmd) - let schemaResult = try await client?.getSchema(descriptor) - let schema = schemaResult!.schema! - XCTAssertEqual(schema.fields.count, 3) - } - - func doGetTest(_ flightData: Data) async throws { - let ticket = FlightTicket(flightData) - var numCall = 0 - try await client?.doGet(ticket, readerResultClosure: { rb in - numCall += 1 - XCTAssertEqual(rb.schema!.fields.count, 3) - XCTAssertEqual(rb.batches[0].length, 4) - switch ArrowTable.from(recordBatches: rb.batches) { - case .success(let table): - for column in table.columns { - switch column.type.id { - case .double: - let doubleArray = column.data() as? ChunkedArray - XCTAssertNotNil(doubleArray) - XCTAssertEqual(doubleArray?[0], 11.11) - XCTAssertEqual(doubleArray?.asString(0), "11.11") - default: - continue - } - } - case .failure(let error): - throw error - } - }) - - XCTAssertEqual(numCall, 1) - } - - func doGetTestFlightData(_ flightData: Data) async throws { - let ticket = FlightTicket(flightData) - var numCall = 0 - let reader = ArrowReader() - let arrowResult = ArrowReader.makeArrowReaderResult() - try await client?.doGet(ticket, flightDataClosure: { flightData in - switch reader.fromMessage(flightData.dataHeader, dataBody: flightData.dataBody, result: arrowResult) { - case .success: - numCall += 1 - case .failure(let error): - throw error - } - }) - - XCTAssertEqual(numCall, 2) - } - - func doPutTest(_ cmd: String) async throws { - let descriptor = FlightDescriptor(cmd: cmd.data(using: .utf8)!) - let rb = try makeRecordBatch() - var numCall = 0 - try await client?.doPut(descriptor, recordBatches: [rb], closure: { _ in - numCall += 1 - }) - - XCTAssertEqual(numCall, 1) - } - - func doExchangeTest() async throws { - let descriptor = FlightDescriptor(cmd: "flight_ticket".data(using: .utf8)!) 
- let rb = try makeRecordBatch() - var numCall = 0 - try await client?.doExchange(descriptor, recordBatches: [rb], closure: { result in - numCall += 1 - XCTAssertEqual(result.schema?.fields.count, 3) - XCTAssertEqual(result.batches[0].length, 4) - }) - - XCTAssertEqual(numCall, 1) - } -} - -actor FlightServerData { - public var serverup = false - func setServerUp(_ serverUp: Bool) { - self.serverup = serverUp - } - - func isServerUp() -> Bool { - return serverup - } -} - -final class FlightTest: XCTestCase { - let serverData = FlightServerData() - - func testFlightServer() async throws { - let basicTask = Task { - try await FlightServerImpl.run() - defer { - print("server shutting down") - do { - try FlightServerImpl.group?.syncShutdownGracefully() - } catch { - } - } - - await serverData.setServerUp(true) - try await FlightServerImpl.server?.onClose.get() - return "done" - } - - let secondTask = Task { - defer { - _ = FlightServerImpl.server?.close() - } - - while await !serverData.isServerUp() { - try await Task.sleep(nanoseconds: 1_000_000) - } - - let clientImpl = try await FlightClientTester() - try await clientImpl.listActionTest() - try await clientImpl.doPutTest("flight_ticket") - try await clientImpl.doPutTest("flight_another") - try await clientImpl.listFlightsTest() - try await clientImpl.doActionTest("healthcheck", actionBody: Data("healthcheck body".utf8)) - try await clientImpl.getSchemaTest(Data("schema info".utf8)) - try await clientImpl.doGetTest(Data("'flight_ticket'".utf8)) - try await clientImpl.doGetTestFlightData(Data("'flight_another'".utf8)) - try await clientImpl.doExchangeTest() - return "done" - } - - _ = try await [basicTask.value, secondTask.value] - print("done running") - } -} diff --git a/swift/CDataWGo/.gitignore b/swift/CDataWGo/.gitignore deleted file mode 100644 index 0023a534063..00000000000 --- a/swift/CDataWGo/.gitignore +++ /dev/null @@ -1,8 +0,0 @@ -.DS_Store -/.build -/Packages -xcuserdata/ -DerivedData/ -.swiftpm/configuration/registries.json -.swiftpm/xcode/package.xcworkspace/contents.xcworkspacedata -.netrc diff --git a/swift/CDataWGo/Package.swift b/swift/CDataWGo/Package.swift deleted file mode 100644 index 4d517115aca..00000000000 --- a/swift/CDataWGo/Package.swift +++ /dev/null @@ -1,43 +0,0 @@ -// swift-tools-version: 5.10 -// The swift-tools-version declares the minimum version of Swift required to build this package. - -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. 
- -import PackageDescription - -let package = Package( - name: "go-swift", - platforms: [ - .macOS(.v10_14) - ], - products: [ - .library( - name: "go-swift", - type: .static, - targets: ["go-swift"]) - ], - dependencies: [ - .package(path: "../Arrow") // 👈 Reference to a Local Package - ], - targets: [ - .target( - name: "go-swift", - dependencies: [ - .product(name: "Arrow", package: "Arrow") - ]) - ] -) diff --git a/swift/CDataWGo/Sources/go-swift/CDataTest.swift b/swift/CDataWGo/Sources/go-swift/CDataTest.swift deleted file mode 100644 index b38ca7240ab..00000000000 --- a/swift/CDataWGo/Sources/go-swift/CDataTest.swift +++ /dev/null @@ -1,132 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. - -import Arrow -import ArrowC - -@_cdecl("stringTypeFromSwift") -func stringTypeFromSwift(cSchema: UnsafePointer) { - let unsafePointer = UnsafeMutablePointer(mutating: cSchema) - let exporter = ArrowCExporter() - switch exporter.exportType(&unsafePointer.pointee, arrowType: ArrowType(ArrowType.ArrowString), name: "col1") { - case .success: - return - case .failure(let err): - fatalError("Error exporting string type from swift: \(err)") - } -} - -@_cdecl("stringTypeToSwift") -func stringTypeToSwift(cSchema: UnsafePointer) { - let importer = ArrowCImporter() - switch importer.importField(cSchema.pointee) { - case .success(let field): - if field.name != "col1" { - fatalError("Field name was incorrect expected: col1 but found: \(field.name)") - } - - if field.type.id != ArrowTypeId.string { - fatalError("Field type was incorrect expected: string but found: \(field.type.id)") - } - case .failure(let err): - fatalError("Error importing string type to swift: \(err)") - } -} - -@_cdecl("arrayIntFromSwift") -func arrayIntFromSwift(cArray: UnsafePointer) { - do { - let unsafePointer = UnsafeMutablePointer(mutating: cArray) - let arrayBuilder: NumberArrayBuilder = try ArrowArrayBuilders.loadNumberArrayBuilder() - for index in 0..<100 { - arrayBuilder.append(Int32(index)) - } - - let array = try arrayBuilder.finish() - let exporter = ArrowCExporter() - exporter.exportArray(&unsafePointer.pointee, arrowData: array.arrowData) - } catch let err { - fatalError("Error exporting array from swift \(err)") - } -} - -@_cdecl("arrayStringFromSwift") -func arrayStringFromSwift(cArray: UnsafePointer) { - do { - let unsafePointer = UnsafeMutablePointer(mutating: cArray) - let arrayBuilder = try ArrowArrayBuilders.loadStringArrayBuilder() - for index in 0..<100 { - arrayBuilder.append("test" + String(index)) - } - - let array = try arrayBuilder.finish() - let exporter = ArrowCExporter() - exporter.exportArray(&unsafePointer.pointee, arrowData: array.arrowData) - } catch let err { - fatalError("Error exporting array from swift \(err)") - } -} - 
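The exporters above hand Swift-built arrays across the C Data Interface; the functions that follow import in the opposite direction. A round-trip sketch entirely in Swift, assuming the ArrowCExporter/ArrowCImporter signatures as used in this bridge (roundTripInt32 is a hypothetical helper, not part of the sources):

    import Arrow
    import ArrowC

    // Export a Swift-built Int32 array through the C Data Interface structs,
    // then import it back, mirroring the Go <-> Swift handshake exercised here.
    func roundTripInt32() throws {
        let builder: NumberArrayBuilder<Int32> = try ArrowArrayBuilders.loadNumberArrayBuilder()
        for value in 0..<10 {
            builder.append(Int32(value))
        }
        let array = try builder.finish()

        var cArray = ArrowC.ArrowArray()
        ArrowCExporter().exportArray(&cArray, arrowData: array.arrowData)

        switch ArrowCImporter().importArray(&cArray, arrowType: ArrowType(ArrowType.ArrowInt32)) {
        case .success(let holder):
            // holder wraps the imported data; its release callback is invoked
            // by the importer when the consumer is done with the buffers.
            _ = holder
        case .failure(let error):
            throw error
        }
    }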
-@_cdecl("arrayIntToSwift") -func arrayIntToSwift(cArray: UnsafePointer) { - let importer = ArrowCImporter() - switch importer.importArray(cArray, arrowType: ArrowType(ArrowType.ArrowInt32)) { - case .success(let int32Holder): - let result = RecordBatch.Builder() - .addColumn("col1", arrowArray: int32Holder) - .finish() - switch result { - case .success(let recordBatch): - let col1: Arrow.ArrowArray = recordBatch.data(for: 0) - for index in 0..) { - let importer = ArrowCImporter() - switch importer.importArray(cArray, arrowType: ArrowType(ArrowType.ArrowString)) { - case .success(let dataHolder): - let result = RecordBatch.Builder() - .addColumn("col1", arrowArray: dataHolder) - .finish() - switch result { - case .success(let recordBatch): - let col1: Arrow.ArrowArray = recordBatch.data(for: 0) - for index in 0.. -#include "go_swift.h" -*/ -import "C" -import ( - "strconv" - "unsafe" - - "github.com/apache/arrow-go/v18/arrow" - "github.com/apache/arrow-go/v18/arrow/array" - "github.com/apache/arrow-go/v18/arrow/cdata" - "github.com/apache/arrow-go/v18/arrow/memory" -) - -func stringTypeFromSwift() { - arrowSchema := &cdata.CArrowSchema{} - swSchema := (*C.struct_ArrowSchema)(unsafe.Pointer(arrowSchema)) - C.stringTypeFromSwift(swSchema) - gofield, _ := cdata.ImportCArrowField(arrowSchema) - if gofield.Name != "col1" { - panic("Imported type has incorrect name") - } -} - -func stringTypeToSwift() { - arrowSchema := &cdata.CArrowSchema{} - swSchema := (*C.struct_ArrowSchema)(unsafe.Pointer(arrowSchema)) - C.stringTypeFromSwift(swSchema) - gofield, _ := cdata.ImportCArrowField(arrowSchema) - if gofield.Name != "col1" { - panic("Imported type has incorrect name") - } -} - -func arrayStringFromSwift() { - arrowArray := &cdata.CArrowArray{} - swarray := (*C.struct_ArrowArray)(unsafe.Pointer(arrowArray)) - C.arrayStringFromSwift(swarray) - arr, _ := cdata.ImportCArrayWithType(arrowArray, arrow.BinaryTypes.String) - if arr.Len() != 100 { - panic("Array length is incorrect") - } - - for i := 0; i < 100; i++ { - if arr.ValueStr(i) != ("test" + strconv.Itoa(i)) { - panic("Array value is incorrect") - } - } -} - -func arrayIntFromSwift() { - arrowArray := &cdata.CArrowArray{} - swarray := (*C.struct_ArrowArray)(unsafe.Pointer(arrowArray)) - C.arrayIntFromSwift(swarray) - arr, _ := cdata.ImportCArrayWithType(arrowArray, arrow.PrimitiveTypes.Int32) - if arr.Len() != 100 { - panic("Array length is incorrect") - } - - vals := arr.(*array.Int32).Int32Values() - // and that the values are correct - for i, v := range vals { - if v != int32(i) { - panic("Array value is incorrect") - } - } -} - -func arrayIntToSwift() { - bld := array.NewUint32Builder(memory.DefaultAllocator) - defer bld.Release() - bld.AppendValues([]uint32{1, 2, 3, 4}, []bool{true, true, true, true}) - goarray := bld.NewUint32Array() - var carray cdata.CArrowArray - cdata.ExportArrowArray(goarray, &carray, nil) - swarray := (*C.struct_ArrowArray)(unsafe.Pointer(&carray)) - C.arrayIntToSwift(swarray) - - if swarray.release != nil { - panic("Release was not called by swift to deallocate C array") - } -} - -func arrayStringToSwift() { - bld := array.NewStringBuilder(memory.DefaultAllocator) - defer bld.Release() - bld.AppendValues([]string{"test0", "test1", "test2", "test3"}, []bool{true, true, true, true}) - goarray := bld.NewStringArray() - var carray cdata.CArrowArray - cdata.ExportArrowArray(goarray, &carray, nil) - swarray := (*C.struct_ArrowArray)(unsafe.Pointer(&carray)) - C.arrayStringToSwift(swarray) - - if swarray.release != nil { - 
panic("Release was not called by swift to deallocate C array") - } -} - -func main() { - stringTypeFromSwift() - stringTypeToSwift() - arrayStringFromSwift() - arrayIntFromSwift() - arrayIntToSwift() - arrayStringToSwift() -} diff --git a/swift/data-generator/swift-datagen/go.mod b/swift/data-generator/swift-datagen/go.mod deleted file mode 100644 index 4f70d416290..00000000000 --- a/swift/data-generator/swift-datagen/go.mod +++ /dev/null @@ -1,38 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -module swift-datagen/main - -go 1.23.0 - -toolchain go1.24.1 - -require github.com/apache/arrow-go/v18 v18.3.0 - -require ( - github.com/goccy/go-json v0.10.5 // indirect - github.com/google/flatbuffers v25.2.10+incompatible // indirect - github.com/klauspost/compress v1.18.0 // indirect - github.com/klauspost/cpuid/v2 v2.2.10 // indirect - github.com/pierrec/lz4/v4 v4.1.22 // indirect - github.com/zeebo/xxh3 v1.0.2 // indirect - golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 // indirect - golang.org/x/mod v0.24.0 // indirect - golang.org/x/sync v0.13.0 // indirect - golang.org/x/sys v0.33.0 // indirect - golang.org/x/tools v0.32.0 // indirect - golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da // indirect -) diff --git a/swift/data-generator/swift-datagen/go.sum b/swift/data-generator/swift-datagen/go.sum deleted file mode 100644 index 47a0aed07f6..00000000000 --- a/swift/data-generator/swift-datagen/go.sum +++ /dev/null @@ -1,54 +0,0 @@ -github.com/andybalholm/brotli v1.1.1 h1:PR2pgnyFznKEugtsUo0xLdDop5SKXd5Qf5ysW+7XdTA= -github.com/andybalholm/brotli v1.1.1/go.mod h1:05ib4cKhjx3OQYUY22hTVd34Bc8upXjOLL2rKwwZBoA= -github.com/apache/arrow-go/v18 v18.3.0 h1:Xq4A6dZj9Nu33sqZibzn012LNnewkTUlfKVUFD/RX/I= -github.com/apache/arrow-go/v18 v18.3.0/go.mod h1:eEM1DnUTHhgGAjf/ChvOAQbUQ+EPohtDrArffvUjPg8= -github.com/apache/thrift v0.21.0 h1:tdPmh/ptjE1IJnhbhrcl2++TauVjy242rkV/UzJChnE= -github.com/apache/thrift v0.21.0/go.mod h1:W1H8aR/QRtYNvrPeFXBtobyRkd0/YVhTc6i07XIAgDw= -github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c= -github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= -github.com/goccy/go-json v0.10.5 h1:Fq85nIqj+gXn/S5ahsiTlK3TmC85qgirsdTP/+DeaC4= -github.com/goccy/go-json v0.10.5/go.mod h1:oq7eo15ShAhp70Anwd5lgX2pLfOS3QCiwU/PULtXL6M= -github.com/golang/snappy v1.0.0 h1:Oy607GVXHs7RtbggtPBnr2RmDArIsAefDwvrdWvRhGs= -github.com/golang/snappy v1.0.0/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q= -github.com/google/flatbuffers v25.2.10+incompatible h1:F3vclr7C3HpB1k9mxCGRMXq6FdUalZ6H/pNX4FP1v0Q= -github.com/google/flatbuffers v25.2.10+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8= -github.com/google/go-cmp v0.6.0 
h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= -github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0= -github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo= -github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4= -github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE= -github.com/klauspost/compress v1.18.0 h1:c/Cqfb0r+Yi+JtIEq73FWXVkRonBlf0CRNYc8Zttxdo= -github.com/klauspost/compress v1.18.0/go.mod h1:2Pp+KzxcywXVXMr50+X0Q/Lsb43OQHYWRCY2AiWywWQ= -github.com/klauspost/cpuid/v2 v2.2.10 h1:tBs3QSyvjDyFTq3uoc/9xFpCuOsJQFNPiAhYdw2skhE= -github.com/klauspost/cpuid/v2 v2.2.10/go.mod h1:hqwkgyIinND0mEev00jJYCxPNVRVXFQeu1XKlok6oO0= -github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs= -github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY= -github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3 h1:+n/aFZefKZp7spd8DFdX7uMikMLXX4oubIzJF4kv/wI= -github.com/minio/c2goasm v0.0.0-20190812172519-36a3d3bbc4f3/go.mod h1:RagcQ7I8IeTMnF8JTXieKnO4Z6JCsikNEzj0DwauVzE= -github.com/pierrec/lz4/v4 v4.1.22 h1:cKFw6uJDK+/gfw5BcDL0JL5aBsAFdsIT18eRtLj7VIU= -github.com/pierrec/lz4/v4 v4.1.22/go.mod h1:gZWDp/Ze/IJXGXf23ltt2EXimqmTUXEy0GFuRQyBid4= -github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM= -github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4= -github.com/stretchr/testify v1.10.0 h1:Xv5erBjTwe/5IxqUQTdXv5kgmIvbHo3QQyRwhJsOfJA= -github.com/stretchr/testify v1.10.0/go.mod h1:r2ic/lqez/lEtzL7wO/rwa5dbSLXVDPFyf8C91i36aY= -github.com/zeebo/assert v1.3.0 h1:g7C04CbJuIDKNPFHmsk4hwZDO5O+kntRxzaUoNXj+IQ= -github.com/zeebo/assert v1.3.0/go.mod h1:Pq9JiuJQpG8JLJdtkwrJESF0Foym2/D9XMU5ciN/wJ0= -github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0= -github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA= -golang.org/x/exp v0.0.0-20240909161429-701f63a606c0 h1:e66Fs6Z+fZTbFBAxKfP3PALWBtpfqks2bwGcexMxgtk= -golang.org/x/exp v0.0.0-20240909161429-701f63a606c0/go.mod h1:2TbTHSBQa924w8M6Xs1QcRcFwyucIwBGpK1p2f1YFFY= -golang.org/x/mod v0.24.0 h1:ZfthKaKaT4NrhGVZHO1/WDTwGES4De8KtWO0SIbNJMU= -golang.org/x/mod v0.24.0/go.mod h1:IXM97Txy2VM4PJ3gI61r1YEk/gAj6zAHN3AdZt6S9Ww= -golang.org/x/sync v0.13.0 h1:AauUjRAJ9OSnvULf/ARrrVywoJDy0YS2AwQ98I37610= -golang.org/x/sync v0.13.0/go.mod h1:1dzgHSNfp02xaA81J2MS99Qcpr2w7fw1gpm99rleRqA= -golang.org/x/sys v0.33.0 h1:q3i8TbbEz+JRD9ywIRlyRAQbM0qF7hu24q3teo2hbuw= -golang.org/x/sys v0.33.0/go.mod h1:BJP2sWEmIv4KK5OTEluFJCKSidICx8ciO85XgH3Ak8k= -golang.org/x/tools v0.32.0 h1:Q7N1vhpkQv7ybVzLFtTjvQya2ewbwNDZzUgfXGqtMWU= -golang.org/x/tools v0.32.0/go.mod h1:ZxrU41P/wAbZD8EDa6dDCa6XfpkhJ7HFMjHJXfBDu8s= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da h1:noIWHXmPHxILtqtCOPIhSt0ABwskkZKjD3bXGnZGpNY= -golang.org/x/xerrors v0.0.0-20240903120638-7835f813f4da/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90= -gonum.org/v1/gonum v0.16.0 h1:5+ul4Swaf3ESvrOnidPp4GZbzf0mxVQpDCYUQE7OJfk= -gonum.org/v1/gonum v0.16.0/go.mod h1:fef3am4MQ93R2HHpKnLk4/Tbh/s0+wqD5nfa6Pnwy4E= -gopkg.in/yaml.v3 v3.0.1 h1:fxVm/GzAzEWqLHuvctI91KS9hhNmmWOoWu0XTYJS7CA= -gopkg.in/yaml.v3 v3.0.1/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM= diff --git 
a/swift/data-generator/swift-datagen/main.go b/swift/data-generator/swift-datagen/main.go deleted file mode 100644 index dbe24222faf..00000000000 --- a/swift/data-generator/swift-datagen/main.go +++ /dev/null @@ -1,116 +0,0 @@ -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an "AS IS" BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. - -package main - -import ( - "log" - "os" - - "github.com/apache/arrow-go/v18/arrow" - "github.com/apache/arrow-go/v18/arrow/array" - "github.com/apache/arrow-go/v18/arrow/ipc" - "github.com/apache/arrow-go/v18/arrow/memory" -) - -func writeBytes(rec arrow.Record, file_name string) { - file, err := os.Create(file_name) - defer file.Close() - if err != nil { - log.Fatal(err) - } - - rr, write_err := ipc.NewFileWriter(file, ipc.WithSchema(rec.Schema())) - if write_err != nil { - log.Fatal(write_err) - } - - rr.Write(rec) - rr.Close() -} - -func writeBoolData() { - alloc := memory.NewGoAllocator() - schema := arrow.NewSchema([]arrow.Field{ - {Name: "one", Type: arrow.FixedWidthTypes.Boolean}, - {Name: "two", Type: arrow.BinaryTypes.String}, - }, nil) - - b := array.NewRecordBuilder(alloc, schema) - defer b.Release() - - b.Field(0).(*array.BooleanBuilder).AppendValues([]bool{true, false}, nil) - b.Field(0).(*array.BooleanBuilder).AppendNull() - b.Field(0).(*array.BooleanBuilder).AppendValues([]bool{false, true}, nil) - b.Field(1).(*array.StringBuilder).AppendValues([]string{"zero", "one", "two", "three", "four"}, nil) - rec := b.NewRecord() - defer rec.Release() - - writeBytes(rec, "testdata_bool.arrow") -} - -func writeDoubleData() { - alloc := memory.NewGoAllocator() - schema := arrow.NewSchema([]arrow.Field{ - {Name: "one", Type: arrow.PrimitiveTypes.Float64}, - {Name: "two", Type: arrow.BinaryTypes.String}, - }, nil) - - b := array.NewRecordBuilder(alloc, schema) - defer b.Release() - - b.Field(0).(*array.Float64Builder).AppendValues([]float64{1.1, 2.2, 3.3, 4.4, 5.5}, nil) - b.Field(1).(*array.StringBuilder).AppendValues([]string{"zero"}, nil) - b.Field(1).(*array.StringBuilder).AppendNull() - b.Field(1).(*array.StringBuilder).AppendValues([]string{"two", "three", "four"}, nil) - rec := b.NewRecord() - defer rec.Release() - - writeBytes(rec, "testdata_double.arrow") -} - -func writeStructData() { - mem := memory.NewGoAllocator() - - fields := []arrow.Field{ - {Name: "my struct", Type: arrow.StructOf([]arrow.Field{ - {Name: "my string", Type: arrow.BinaryTypes.String}, - {Name: "my bool", Type: arrow.FixedWidthTypes.Boolean}, - }...)}, - } - - schema := arrow.NewSchema(fields, nil) - - bld := array.NewRecordBuilder(mem, schema) - defer bld.Release() - - sb := bld.Field(0).(*array.StructBuilder) - f1b := sb.FieldBuilder(0).(*array.StringBuilder) - f2b := sb.FieldBuilder(1).(*array.BooleanBuilder) - - sb.AppendValues([]bool{true, true, false}) - f1b.AppendValues([]string{"0", 
"1", ""}, []bool{true, true, false}) - f2b.AppendValues([]bool{false, true, false}, []bool{true, true, false}) - - rec := bld.NewRecord() - writeBytes(rec, "testdata_struct.arrow") -} - -func main() { - writeBoolData() - writeDoubleData() - writeStructData() -} diff --git a/swift/gen-flatbuffers.sh b/swift/gen-flatbuffers.sh deleted file mode 100755 index f55f4eb8ba7..00000000000 --- a/swift/gen-flatbuffers.sh +++ /dev/null @@ -1,51 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -set -eu -pushd Arrow/Sources/Arrow/ -flatc --swift ../../../../format/Message.fbs -flatc --swift ../../../../format/Schema.fbs -flatc --swift ../../../../format/SparseTensor.fbs -flatc --swift ../../../../format/Tensor.fbs -flatc --swift ../../../../format/File.fbs -cat <

header.swift -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. -HEADER -for generated_swift in *_generated.swift; do - mv ${generated_swift} ${generated_swift}.orig - cat header.swift ${generated_swift}.orig > ${generated_swift} - rm ${generated_swift}.orig -done -rm header.swift -popd \ No newline at end of file diff --git a/swift/gen-protobuffers.sh b/swift/gen-protobuffers.sh deleted file mode 100755 index 383a7a2f319..00000000000 --- a/swift/gen-protobuffers.sh +++ /dev/null @@ -1,44 +0,0 @@ -#!/usr/bin/env bash - -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -set -eu -protoc --swift_out=./ArrowFlight/Sources/ArrowFlight --proto_path=../format Flight.proto -protoc --grpc-swift_out=./ArrowFlight/Sources/ArrowFlight --proto_path=../format Flight.proto -cat <
header.swift -// Licensed to the Apache Software Foundation (ASF) under one -// or more contributor license agreements. See the NOTICE file -// distributed with this work for additional information -// regarding copyright ownership. The ASF licenses this file -// to you under the Apache License, Version 2.0 (the -// "License"); you may not use this file except in compliance -// with the License. You may obtain a copy of the License at -// -// http://www.apache.org/licenses/LICENSE-2.0 -// -// Unless required by applicable law or agreed to in writing, -// software distributed under the License is distributed on an -// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -// KIND, either express or implied. See the License for the -// specific language governing permissions and limitations -// under the License. -HEADER -mv ./ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift ./ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift.orig -cat header.swift ./ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift.orig > ./ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift -rm ./ArrowFlight/Sources/ArrowFlight/Flight.grpc.swift.orig -rm header.swift \ No newline at end of file From efbe5cbbffdf1fdde1b9b6e9f8f96246afd7d165 Mon Sep 17 00:00:00 2001 From: Ian Cook Date: Fri, 13 Jun 2025 16:06:03 +0200 Subject: [PATCH 52/63] GH-46775: [Docs] Fix navigation issues (#46784) Closes #46775 and #46782. This also adds external link icons beside the docs links in the Implementations table that link to docs pages outside of the main repo Sphinx docs. * GitHub Issue: #46775 --- docs/source/implementations.rst | 48 +++++++++++++++++++++++---------- 1 file changed, 34 insertions(+), 14 deletions(-) diff --git a/docs/source/implementations.rst b/docs/source/implementations.rst index 037013e9504..f68a97fe992 100644 --- a/docs/source/implementations.rst +++ b/docs/source/implementations.rst @@ -35,43 +35,43 @@ documentation and source code for these libraries. - Docs - Source * - C++ - - `C++ Docs `_ + - :doc:`C++ Docs` - `C++ Source `_ * - C GLib - - `C GLib Docs `_ + - :doc:`C GLib Docs` - `C GLib Source `_ * - C# - - `C# Docs `_ + - `C# Docs `_ :fa:`external-link-alt` - `C# Source `_ * - Go - - `Go Docs `_ + - `Go Docs `_ :fa:`external-link-alt` - `Go Source `_ * - Java - - `Java Docs `_ + - :doc:`Java Docs` - `Java Source `_ * - JavaScript - - `JavaScript Docs `_ + - `JavaScript Docs `_ :fa:`external-link-alt` - `JavaScript Source `_ * - Julia - - `Julia Docs `_ + - `Julia Docs `_ :fa:`external-link-alt` - `Julia Source `_ * - MATLAB - - `MATLAB Docs `_ + - `MATLAB Docs `_ :fa:`external-link-alt` - `MATLAB Source `_ * - Python - - `Python Docs `_ + - :doc:`Python Docs` - `Python Source `_ * - R - - `R Docs `_ + - `R Docs `_ :fa:`external-link-alt` - `R Source `_ * - Ruby - - `Ruby Docs `_ + - `Ruby Docs `_ :fa:`external-link-alt` - `Ruby Source `_ * - Rust - - `Rust Docs `_ + - `Rust Docs `_ :fa:`external-link-alt` - `Rust Source `_ * - Swift - - `Swift Docs `_ + - `Swift Docs `_ :fa:`external-link-alt` - `Swift Source `_ In addition to the libraries listed above, the Arrow project hosts the @@ -82,7 +82,7 @@ designed to help produce and consume Arrow data. :header-rows: 0 * - nanoarrow - - `nanoarrow Docs `_ + - `nanoarrow Docs `_ :fa:`external-link-alt` - `nanoarrow Source `_ Implementation Status @@ -104,3 +104,23 @@ libraries for different programming languages. The source files for the Cookbook are maintained in the `Apache Arrow Cookbooks repository `_. + +.. 
toctree:: + :maxdepth: 1 + :hidden: + + C++ + C GLib + C# + Go + Java + JavaScript + Julia + MATLAB + Python + R + Ruby + Rust + Swift + nanoarrow + Implementation Status From 8163bb78cc27bb912a7a8a39266f40d56281ca92 Mon Sep 17 00:00:00 2001 From: Hiroyuki Sato Date: Sat, 14 Jun 2025 04:24:25 +0900 Subject: [PATCH 53/63] GH-46806: [CI][Dev][Swift] Remove Swift related settings (#46807) ### Rationale for this change #46803 removed the Swift implementation, but some Swift-related files still exist. ### What changes are included in this PR? Remove Swift related settings ### Are these changes tested? No. ### Are there any user-facing changes? No, but developers of the Swift implementation should note that it has moved to its own repository. * GitHub Issue: #46806 Authored-by: Hiroyuki Sato Signed-off-by: Jacob Wujciak-Jens --- ci/docker/ubuntu-swift.dockerfile | 34 ---------------------- ci/scripts/swift_test.sh | 47 ------------------------------- dev/release/rat_exclude_files.txt | 2 -- docker-compose.yml | 16 ----------- 4 files changed, 99 deletions(-) delete mode 100644 ci/docker/ubuntu-swift.dockerfile delete mode 100755 ci/scripts/swift_test.sh diff --git a/ci/docker/ubuntu-swift.dockerfile b/ci/docker/ubuntu-swift.dockerfile deleted file mode 100644 index d90d2d87b90..00000000000 --- a/ci/docker/ubuntu-swift.dockerfile +++ /dev/null @@ -1,34 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -FROM swift:5.10-noble - -# Go is needed for generating test data -RUN apt-get update -y -q && \ - apt-get install -y -q --no-install-recommends \ - golang-go \ - unzip \ - wget && \ - apt-get clean - -ARG swift_lint=0.53.0 -RUN wget https://github.com/realm/SwiftLint/releases/download/${swift_lint}/swiftlint_linux.zip && \ - unzip swiftlint_linux.zip && \ - mv swiftlint /usr/local/bin/ && \ - mkdir -p /usr/local/share/doc/swiftlint/ && \ - mv LICENSE /usr/local/share/doc/swiftlint/ && \ - rm -rf swiftlint_linux.zip diff --git a/ci/scripts/swift_test.sh b/ci/scripts/swift_test.sh deleted file mode 100755 index aba90f31e50..00000000000 --- a/ci/scripts/swift_test.sh +++ /dev/null @@ -1,47 +0,0 @@ -#!/usr/bin/env bash -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License.
You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -set -ex - -data_gen_dir=${1}/swift/data-generator/swift-datagen -export GOPATH=/ -pushd ${data_gen_dir} -go get -d ./... -go run . -cp *.arrow ../../Arrow -popd - -source_dir=${1}/swift -pushd ${source_dir} -swiftlint --strict -popd - -source_dir=${1}/swift/Arrow -pushd ${source_dir} -sed 's/\/\/ build://g' Package.swift > Package.swift.build -mv Package.swift.build Package.swift -swift test -popd - -source_dir=${1}/swift/ArrowFlight -pushd ${source_dir} -sed 's/\/\/ build://g' Package.swift > Package.swift.build -mv Package.swift.build Package.swift -swift test -popd diff --git a/dev/release/rat_exclude_files.txt b/dev/release/rat_exclude_files.txt index 0c827421fe8..51c01516e7c 100644 --- a/dev/release/rat_exclude_files.txt +++ b/dev/release/rat_exclude_files.txt @@ -151,5 +151,3 @@ r/tools/nixlibs-allowlist.txt .gitattributes ruby/red-arrow/.yardopts .github/pull_request_template.md -swift/data-generator/swift-datagen/go.sum -swift/CDataWGo/go.sum diff --git a/docker-compose.yml b/docker-compose.yml index 2eb8424aee5..60c79221469 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -157,7 +157,6 @@ x-hierarchy: - ubuntu-cpp-thread-sanitizer - ubuntu-cpp-emscripten - ubuntu-r-valgrind - - ubuntu-swift - ubuntu-verify-rc - r - r-revdepcheck @@ -1039,21 +1038,6 @@ services: volumes: *ubuntu-volumes command: *python-command - ubuntu-swift: - # Usage: - # docker compose build ubuntu-swift - # docker compose run --rm ubuntu-swift - # Parameters: - image: ubuntu-swift - build: - context: . - dockerfile: ci/docker/ubuntu-swift.dockerfile - shm_size: *shm-size - volumes: *ubuntu-volumes - command: > - /bin/bash -c "/arrow/ci/scripts/swift_test.sh /arrow" - - fedora-python: # Usage: # docker compose build fedora-cpp From e3719af1e10e4cb6837e41d75405a10c61f51e32 Mon Sep 17 00:00:00 2001 From: Jacob Wujciak-Jens Date: Sat, 14 Jun 2025 22:55:29 +0200 Subject: [PATCH 54/63] GH-46805: [CI][Dev] Fix caching for R hooks in lint job (#46812) ### Rationale for this change The lint job is failing because the R hooks are missing packages. This happens because the environment's library symlinks the R packages from the separate `renv` cache in `.local`. ### What changes are included in this PR? - also cache the renv cache - only install r-base (saves ~30s) as we need to install all required packages through renv anyway ### Are these changes tested? CI ### Are there any user-facing changes?
CI no longer :x: * GitHub Issue: #46805 Lead-authored-by: Jacob Wujciak-Jens Co-authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- .github/workflows/dev.yml | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/.github/workflows/dev.yml b/.github/workflows/dev.yml index dda648c8c47..fc18c75fd53 100644 --- a/.github/workflows/dev.yml +++ b/.github/workflows/dev.yml @@ -55,12 +55,14 @@ jobs: sudo apt update sudo apt install -y -V \ pre-commit \ - r-cran-xml2 \ + r-base \ ruby-dev - name: Cache pre-commit uses: actions/cache@v4 with: - path: ~/.cache/pre-commit + path: | + ~/.cache/pre-commit + ~/.local/share/renv/cache key: pre-commit-${{ hashFiles('.pre-commit-config.yaml') }} - name: Run pre-commit run: | From 3309fe6b005cd461a8a2285b54a4cb44839f78fb Mon Sep 17 00:00:00 2001 From: Ian Cook Date: Sun, 15 Jun 2025 12:44:45 +0200 Subject: [PATCH 55/63] GH-46816: [Docs] Fix links to Swift docs and source (#46817) Closes #46816 * GitHub Issue: #46816 --- README.md | 1 + docs/source/implementations.rst | 6 +++--- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 06d2a113898..f31dee92d0e 100644 --- a/README.md +++ b/README.md @@ -54,6 +54,7 @@ Major components of the project include: - [R libraries](https://github.com/apache/arrow/tree/main/r) - [Ruby libraries](https://github.com/apache/arrow/tree/main/ruby) - [Rust libraries](https://github.com/apache/arrow-rs) `↗` + - [Swift libraries](https://github.com/apache/arrow-swift) `↗` The `↗` icon denotes that this component of the project is maintained in a separate repository. diff --git a/docs/source/implementations.rst b/docs/source/implementations.rst index f68a97fe992..6597f3a527f 100644 --- a/docs/source/implementations.rst +++ b/docs/source/implementations.rst @@ -71,8 +71,8 @@ documentation and source code for these libraries. - `Rust Docs `_ :fa:`external-link-alt` - `Rust Source `_ * - Swift - - `Swift Docs `_ :fa:`external-link-alt` - - `Swift Source `_ + - `Swift Docs `_ :fa:`external-link-alt` + - `Swift Source `_ In addition to the libraries listed above, the Arrow project hosts the **nanoarrow** subproject which provides a set of lightweight libraries @@ -121,6 +121,6 @@ The source files for the Cookbook are maintained in the R Ruby Rust - Swift + Swift nanoarrow Implementation Status From 1e3becbc480178fdf7798a5aa57e304d4ad7367e Mon Sep 17 00:00:00 2001 From: Raúl Cumplido Date: Sun, 15 Jun 2025 23:10:39 +0200 Subject: [PATCH 56/63] GH-46809: [CI][Packaging] Stop trying to add headers from arrow/compu… (#46810) MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit ### Rationale for this change The PR to split the Arrow compute kernels into its own shared library had some headers at arrow/compute/kernels during development. Those were moved to arrow/compute on this commit: https://github.com/apache/arrow/pull/46261/commits/1d90aeb9c370f8e258fcd5c55648845ad189cb99 We missed updating the RPM packages in the interim, as there are no public headers to be installed anymore from `arrow/compute/kernels` ### What changes are included in this PR? Stop trying to include or exclude `/arrow/compute/kernels` headers on RPM package ### Are these changes tested? Yes, via archery ### Are there any user-facing changes?
No * GitHub Issue: #46809 Authored-by: Raúl Cumplido Signed-off-by: Sutou Kouhei --- dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in | 2 -- 1 file changed, 2 deletions(-) diff --git a/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in b/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in index 47e8230a071..d881789087e 100644 --- a/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in +++ b/dev/tasks/linux-packages/apache-arrow/yum/arrow.spec.in @@ -320,7 +320,6 @@ Libraries and header files for Apache Arrow C++. %{_datadir}/gdb/auto-load/ %{_includedir}/arrow/ %exclude %{_includedir}/arrow/acero/ -%exclude %{_includedir}/arrow/compute/kernels %exclude %{_includedir}/arrow/compute/row %exclude %{_includedir}/arrow/dataset/ %if %{use_flight} @@ -363,7 +362,6 @@ Libraries and header files for Apache Arrow Compute %defattr(-,root,root,-) %doc README.md %license LICENSE.txt NOTICE.txt -%{_includedir}/arrow/compute/kernels %{_includedir}/arrow/compute/row %{_libdir}/cmake/ArrowCompute/ %{_libdir}/libarrow_compute.a From 2f2e0c6dd8289afbba8e19512cfb2d8589e7f8ee Mon Sep 17 00:00:00 2001 From: Hiroyuki Sato Date: Mon, 16 Jun 2025 06:14:44 +0900 Subject: [PATCH 57/63] GH-46757: [CI][Packaging][Conan] Synchronize upstream conan (#46758) ### Rationale for this change Apache Arrow 19.0.1 is available in the conan repository. We need to synchronize with upstream. ### What changes are included in this PR? * execute `ci/conan/merge_upstream.sh` * Resolve files where the merge failed. (Both files contain license headers, so the patch failed to apply.) * `ci/conan/all/test_package/CMakeLists.txt` * `ci/conan/all/conanfile.py` ### Are these changes tested? Yes. ### Are there any user-facing changes? No. * GitHub Issue: #46757 Lead-authored-by: Hiroyuki Sato Co-authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- ci/conan/all/conandata.yml | 10 +++ ci/conan/all/conanfile.py | 70 +++------------- .../all/patches/19.0.1-0001-fix-cmake.patch | 79 +++++++++++++++++++ .../19.0.1-0002-fix-downloaded-mimalloc.patch | 37 +++++++++ ci/conan/all/test_package/CMakeLists.txt | 2 +- ci/conan/config.yml | 2 + ci/conan/merge_status.sh | 2 +- 7 files changed, 140 insertions(+), 62 deletions(-) create mode 100644 ci/conan/all/patches/19.0.1-0001-fix-cmake.patch create mode 100644 ci/conan/all/patches/19.0.1-0002-fix-downloaded-mimalloc.patch diff --git a/ci/conan/all/conandata.yml b/ci/conan/all/conandata.yml index a13b31c2e82..4efaf70568f 100644 --- a/ci/conan/all/conandata.yml +++ b/ci/conan/all/conandata.yml @@ -21,6 +21,9 @@ # SOFTWARE.
sources: + "19.0.1": + url: "https://www.apache.org/dyn/closer.lua/arrow/arrow-19.0.1/apache-arrow-19.0.1.tar.gz?action=download" + sha256: "acb76266e8b0c2fbb7eb15d542fbb462a73b3fd1e32b80fad6c2fafd95a51160" "18.1.0": url: "https://www.apache.org/dyn/closer.lua/arrow/arrow-18.1.0/apache-arrow-18.1.0.tar.gz?action=download" sha256: "2dc8da5f8796afe213ecc5e5aba85bb82d91520eff3cf315784a52d0fa61d7fc" @@ -40,6 +43,13 @@ sources: url: "https://www.apache.org/dyn/closer.lua/arrow/arrow-14.0.2/apache-arrow-14.0.2.tar.gz?action=download" sha256: "1304dedb41896008b89fe0738c71a95d9b81752efc77fa70f264cb1da15d9bc2" patches: + "19.0.1": + - patch_file: "patches/19.0.1-0001-fix-cmake.patch" + patch_description: "use cci package" + patch_type: "conan" + - patch_file: "patches/19.0.1-0002-fix-downloaded-mimalloc.patch" + patch_description: "use cci package" + patch_type: "conan" "18.1.0": - patch_file: "patches/18.0.0-0001-fix-cmake.patch" patch_description: "use cci package" diff --git a/ci/conan/all/conanfile.py b/ci/conan/all/conanfile.py index 5db9fe35672..6881ee6f325 100644 --- a/ci/conan/all/conanfile.py +++ b/ci/conan/all/conanfile.py @@ -20,6 +20,8 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. +import os + from conan import ConanFile from conan.errors import ConanInvalidConfiguration, ConanException from conan.tools.build import check_min_cppstd, cross_building @@ -28,11 +30,9 @@ from conan.tools.microsoft import is_msvc, is_msvc_static_runtime from conan.tools.scm import Version -import os -import glob - required_conan_version = ">=2.1.0" + class ArrowConan(ConanFile): name = "arrow" description = "Apache Arrow is a cross-language development platform for in-memory data" @@ -102,7 +102,7 @@ class ArrowConan(ConanFile): "dataset_modules": False, "deprecated": True, "encryption": False, - "filesystem_layer": False, + "filesystem_layer": True, "hdfs_bridgs": False, "plasma": "deprecated", "simd_level": "default", @@ -142,7 +142,7 @@ class ArrowConan(ConanFile): def _min_cppstd(self): # arrow >= 10.0.0 requires C++17. 
# https://github.com/apache/arrow/pull/13991 - return "11" if Version(self.version) < "10.0.0" else "17" + return "17" def export_sources(self): export_conandata_patches(self) @@ -151,10 +151,10 @@ def export_sources(self): def config_options(self): if self.settings.os == "Windows": del self.options.fPIC - if Version(self.version) < "8.0.0": - del self.options.substrait if is_msvc(self): self.options.with_boost = True + if Version(self.version) >= "19.0.0": + self.options.with_mimalloc = True def configure(self): if self.options.shared: @@ -209,9 +209,6 @@ def requirements(self): self.requires("snappy/1.1.9") if self.options.get_safe("simd_level") != None or \ self.options.get_safe("runtime_simd_level") != None: - if Version(self.version) < 8: - self.requires("xsimd/9.0.1") - else: self.requires("xsimd/13.0.0") if self.options.with_zlib: self.requires("zlib/[>=1.2.11 <2]") @@ -253,15 +250,6 @@ def validate(self): if self.settings.compiler.get_safe("cppstd"): check_min_cppstd(self, self._min_cppstd) - if ( - Version(self.version) < "10.0.0" - and self.settings.compiler == "clang" - and Version(self.settings.compiler.version) < "3.9" - ): - raise ConanInvalidConfiguration( - f"{self.ref} requires C++11, which needs at least clang-3.9" - ) - if self.options.get_safe("skyhook", False): raise ConanInvalidConfiguration("CCI has no librados recipe (yet)") if self.options.with_cuda: @@ -425,28 +413,11 @@ def generate(self): tc.generate() deps = CMakeDeps(self) + deps.set_property("mimalloc", "cmake_target_name", "mimalloc::mimalloc") deps.generate() def _patch_sources(self): apply_conandata_patches(self) - if Version(self.version) < "10.0.0": - for filename in glob.glob(os.path.join(self.source_folder, "cpp", "cmake_modules", "Find*.cmake")): - if os.path.basename(filename) not in [ - "FindArrow.cmake", - "FindArrowAcero.cmake", - "FindArrowCUDA.cmake", - "FindArrowDataset.cmake", - "FindArrowFlight.cmake", - "FindArrowFlightSql.cmake", - "FindArrowFlightTesting.cmake", - "FindArrowPython.cmake", - "FindArrowPythonFlight.cmake", - "FindArrowSubstrait.cmake", - "FindArrowTesting.cmake", - "FindGandiva.cmake", - "FindParquet.cmake", - ]: - os.remove(filename) def build(self): self._patch_sources() @@ -464,29 +435,6 @@ def package(self): rmdir(self, os.path.join(self.package_folder, "lib", "pkgconfig")) rmdir(self, os.path.join(self.package_folder, "share")) - cmake_suffix = "shared" if self.options.shared else "static" - - alias_map = { f"Arrow::arrow_{cmake_suffix}": f"arrow::arrow_{cmake_suffix}" } - - if self.options.parquet: - alias_map[f"Parquet::parquet_{cmake_suffix}"] = f"arrow::parquet_{cmake_suffix}" - - if self.options.get_safe("substrait"): - alias_map[f"Arrow::arrow_substrait_{cmake_suffix}"] = f"arrow::arrow_substrait_{cmake_suffix}" - - if self.options.acero: - alias_map[f"Arrow::arrow_acero_{cmake_suffix}"] = f"arrow::arrow_acero_{cmake_suffix}" - - if self.options.gandiva: - alias_map[f"Gandiva::gandiva_{cmake_suffix}"] = f"arrow::gandiva_{cmake_suffix}" - - if self.options.with_flight_rpc: - alias_map[f"ArrowFlight::arrow_flight_sql_{cmake_suffix}"] = f"arrow::arrow_flight_sql_{cmake_suffix}" - - @property - def _module_subfolder(self): - return os.path.join("lib", "cmake") - def package_info(self): # FIXME: fix CMake targets of components @@ -556,6 +504,8 @@ def package_info(self): self.cpp_info.components["dataset"].libs = ["arrow_dataset"] if self.options.parquet: self.cpp_info.components["dataset"].requires = ["libparquet"] + if self.options.acero and Version(self.version) >= 
"19.0.0": + self.cpp_info.components["dataset"].requires = ["libacero"] if self.options.cli and (self.options.with_cuda or self.options.with_flight_rpc or self.options.parquet): binpath = os.path.join(self.package_folder, "bin") diff --git a/ci/conan/all/patches/19.0.1-0001-fix-cmake.patch b/ci/conan/all/patches/19.0.1-0001-fix-cmake.patch new file mode 100644 index 00000000000..0d37465a0eb --- /dev/null +++ b/ci/conan/all/patches/19.0.1-0001-fix-cmake.patch @@ -0,0 +1,79 @@ +MIT License + +Copyright (c) 2025 Conan.io + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +diff --git a/cpp/cmake_modules/FindThriftAlt.cmake b/cpp/cmake_modules/FindThriftAlt.cmake +index 98a706d..edf195e 100644 +--- a/cpp/cmake_modules/FindThriftAlt.cmake ++++ b/cpp/cmake_modules/FindThriftAlt.cmake +@@ -45,22 +45,20 @@ endif() + # * https://github.com/apache/thrift/pull/2725 + # * https://github.com/apache/thrift/pull/2726 + # * https://github.com/conda-forge/thrift-cpp-feedstock/issues/68 +-if(NOT WIN32) +- set(find_package_args "") +- if(ThriftAlt_FIND_VERSION) +- list(APPEND find_package_args ${ThriftAlt_FIND_VERSION}) +- endif() +- if(ThriftAlt_FIND_QUIETLY) +- list(APPEND find_package_args QUIET) +- endif() +- find_package(Thrift ${find_package_args}) +- if(Thrift_FOUND) +- set(ThriftAlt_FOUND TRUE) +- add_executable(thrift::compiler IMPORTED) +- set_target_properties(thrift::compiler PROPERTIES IMPORTED_LOCATION +- "${THRIFT_COMPILER}") +- return() +- endif() ++set(find_package_args "") ++if(ThriftAlt_FIND_VERSION) ++ list(APPEND find_package_args ${ThriftAlt_FIND_VERSION}) ++endif() ++if(ThriftAlt_FIND_QUIETLY) ++ list(APPEND find_package_args QUIET) ++endif() ++find_package(Thrift ${find_package_args}) ++if(Thrift_FOUND) ++ set(ThriftAlt_FOUND TRUE) ++ add_executable(thrift::compiler IMPORTED) ++ set_target_properties(thrift::compiler PROPERTIES IMPORTED_LOCATION ++ "${THRIFT_COMPILER}") ++ return() + endif() + + function(extract_thrift_version) +diff --git a/cpp/src/parquet/size_statistics.cc b/cpp/src/parquet/size_statistics.cc +index 1ce6c937a..e45eef3f0 100644 +--- a/cpp/src/parquet/size_statistics.cc ++++ b/cpp/src/parquet/size_statistics.cc +@@ -18,9 +18,11 @@ + #include "parquet/size_statistics.h" + + #include ++#include + #include + #include + #include ++#include + + #include "arrow/util/logging.h" + #include "parquet/exception.h" diff --git a/ci/conan/all/patches/19.0.1-0002-fix-downloaded-mimalloc.patch b/ci/conan/all/patches/19.0.1-0002-fix-downloaded-mimalloc.patch new file mode 100644 index 00000000000..d49f14dcc8d 
--- /dev/null +++ b/ci/conan/all/patches/19.0.1-0002-fix-downloaded-mimalloc.patch @@ -0,0 +1,37 @@ +MIT License + +Copyright (c) 2025 Conan.io + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +diff --git a/cpp/cmake_modules/ThirdpartyToolchain.cmake b/cpp/cmake_modules/ThirdpartyToolchain.cmake +index abfe6d2..cc0f3c5 100644 +--- a/cpp/cmake_modules/ThirdpartyToolchain.cmake ++++ b/cpp/cmake_modules/ThirdpartyToolchain.cmake +@@ -2259,6 +2259,10 @@ endif() + # mimalloc - Cross-platform high-performance allocator, from Microsoft + + if(ARROW_MIMALLOC) ++ find_package(mimalloc REQUIRED CONFIG) ++endif() ++ ++if(0) + if(NOT ARROW_ENABLE_THREADING) + message(FATAL_ERROR "Can't use mimalloc with ARROW_ENABLE_THREADING=OFF") + endif() diff --git a/ci/conan/all/test_package/CMakeLists.txt b/ci/conan/all/test_package/CMakeLists.txt index b25c8e889cb..d85120a6626 100644 --- a/ci/conan/all/test_package/CMakeLists.txt +++ b/ci/conan/all/test_package/CMakeLists.txt @@ -20,7 +20,7 @@ # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. -cmake_minimum_required(VERSION 3.8) +cmake_minimum_required(VERSION 3.15) project(test_package LANGUAGES CXX) find_package(Arrow REQUIRED CONFIG) diff --git a/ci/conan/config.yml b/ci/conan/config.yml index cbb2fce0547..33c0261df74 100644 --- a/ci/conan/config.yml +++ b/ci/conan/config.yml @@ -21,6 +21,8 @@ # SOFTWARE. versions: + "19.0.1": + folder: all "18.1.0": folder: all "18.0.0": diff --git a/ci/conan/merge_status.sh b/ci/conan/merge_status.sh index 600385c0e17..229c3a4394c 100644 --- a/ci/conan/merge_status.sh +++ b/ci/conan/merge_status.sh @@ -15,4 +15,4 @@ # specific language governing permissions and limitations # under the License. -UPSTREAM_REVISION=a9b270f9d2052e193ce3c0a6c4e2fda0b0ac5ade +UPSTREAM_REVISION=1729c3c2c3b0e9d058821fa00e8a54154415efc6 From 47244ca265f5eab56efbc7929138dd320808bd58 Mon Sep 17 00:00:00 2001 From: Sutou Kouhei Date: Mon, 16 Jun 2025 17:53:19 +0900 Subject: [PATCH 58/63] GH-46820: [CI][Integration] Use Node.js 20 by default (#46821) ### Rationale for this change We're using `NODE=XX` in `.env` as the default Node.js version. ### What changes are included in this PR? Use `NODE=20` in `.env`. ### Are these changes tested? Yes. ### Are there any user-facing changes? No. 
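For anyone who wants to sanity-check the bump locally, a minimal sketch (assuming Docker and Docker Compose are installed; `conda-integration` is the Compose service that consumes `NODE` from `.env`, per the comment in `ci/docker/conda-integration.dockerfile`):

```console
$ docker compose build conda-integration
$ docker compose run --rm conda-integration
```

The build should pick up Node.js 20 from `.env`; exporting `NODE` in the shell environment should still take precedence over the `.env` default, as is standard Docker Compose behavior.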
* GitHub Issue: #46820 Authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- .env | 2 +- ci/docker/conda-integration.dockerfile | 4 +++- ci/scripts/integration_arrow.sh | 2 +- 3 files changed, 5 insertions(+), 3 deletions(-) diff --git a/.env b/.env index b54b4e5dad7..02a2cf79deb 100644 --- a/.env +++ b/.env @@ -65,7 +65,7 @@ KARTOTHEK=latest # LLVM 12 and GCC 11 reports -Wmismatched-new-delete. LLVM=18 MAVEN=3.8.7 -NODE=18 +NODE=20 NUMBA=latest NUMPY=latest PANDAS=latest diff --git a/ci/docker/conda-integration.dockerfile b/ci/docker/conda-integration.dockerfile index 9101446da66..5a5556c23bc 100644 --- a/ci/docker/conda-integration.dockerfile +++ b/ci/docker/conda-integration.dockerfile @@ -20,10 +20,12 @@ ARG arch=amd64 FROM ${repo}:${arch}-conda-cpp ARG arch=amd64 +# We need to synchronize the following values with the values in .env +# and services.conda-integration in docker-compose.yml. ARG maven=3.8.7 ARG node=20 ARG yarn=1.22 -ARG jdk=11 +ARG jdk=17 # Install Archery and integration dependencies COPY ci/conda_env_archery.txt /arrow/ci/ diff --git a/ci/scripts/integration_arrow.sh b/ci/scripts/integration_arrow.sh index b7771bef875..8ef977ad230 100755 --- a/ci/scripts/integration_arrow.sh +++ b/ci/scripts/integration_arrow.sh @@ -27,7 +27,7 @@ gold_dir=$arrow_dir/testing/data/arrow-ipc-stream/integration : ${ARROW_INTEGRATION_CPP:=ON} : ${ARROW_INTEGRATION_CSHARP:=ON} -: ${ARCHERY_INTEGRATION_TARGET_IMPLEMENTATIONS:=cpp,csharp,js} +: ${ARCHERY_INTEGRATION_TARGET_IMPLEMENTATIONS:=cpp,csharp} export ARCHERY_INTEGRATION_TARGET_IMPLEMENTATIONS . ${arrow_dir}/ci/scripts/util_log.sh From 738cb5374c0752be650af831755563d0720c4634 Mon Sep 17 00:00:00 2001 From: Nic Crane Date: Mon, 16 Jun 2025 16:38:48 +0100 Subject: [PATCH 59/63] GH-46719: [R] Add 32 and 64 bit Decimal types (#46720) ### Rationale for this change 32 and 64 bit Decimal types were added in C++ in https://github.com/apache/arrow/pull/43957 but haven't been implemented in R yet ### What changes are included in this PR? Implements them in R ### Are these changes tested? Yup ### Are there any user-facing changes? 
Yeah, new types but also the implicit downcasting so we should think about how to communicate this if at all * GitHub Issue: #46719 Authored-by: Nic Crane Signed-off-by: Nic Crane --- .../vector_selection_filter_internal.cc | 4 + r/NAMESPACE | 2 + r/R/arrowExports.R | 8 ++ r/R/dplyr-funcs-simple.R | 2 +- r/R/dplyr-funcs-type.R | 2 +- r/R/enums.R | 14 +- r/R/type.R | 26 +++- r/man/data-type.Rd | 6 + r/src/array_to_vector.cpp | 6 + r/src/arrowExports.cpp | 20 +++ r/src/datatype.cpp | 18 +++ r/tests/testthat/test-Array.R | 30 ++++- r/tests/testthat/test-chunked-array.R | 1 + r/tests/testthat/test-data-type.R | 14 +- r/tests/testthat/test-dplyr-funcs-type.R | 126 ++++++++++++++---- r/tests/testthat/test-type.R | 10 +- r/vignettes/data_types.Rmd | 123 ++++++++++++----- 17 files changed, 332 insertions(+), 80 deletions(-) diff --git a/cpp/src/arrow/compute/kernels/vector_selection_filter_internal.cc b/cpp/src/arrow/compute/kernels/vector_selection_filter_internal.cc index 194c3591337..1c2eacb9a76 100644 --- a/cpp/src/arrow/compute/kernels/vector_selection_filter_internal.cc +++ b/cpp/src/arrow/compute/kernels/vector_selection_filter_internal.cc @@ -1096,6 +1096,8 @@ void PopulateFilterKernels(std::vector* out) { {InputType(match::LargeBinaryLike()), plain_filter, BinaryFilterExec}, {InputType(null()), plain_filter, NullFilterExec}, {InputType(Type::FIXED_SIZE_BINARY), plain_filter, PrimitiveFilterExec}, + {InputType(Type::DECIMAL32), plain_filter, PrimitiveFilterExec}, + {InputType(Type::DECIMAL64), plain_filter, PrimitiveFilterExec}, {InputType(Type::DECIMAL128), plain_filter, PrimitiveFilterExec}, {InputType(Type::DECIMAL256), plain_filter, PrimitiveFilterExec}, {InputType(Type::DICTIONARY), plain_filter, DictionaryFilterExec}, @@ -1116,6 +1118,8 @@ void PopulateFilterKernels(std::vector* out) { {InputType(match::LargeBinaryLike()), ree_filter, BinaryFilterExec}, {InputType(null()), ree_filter, NullFilterExec}, {InputType(Type::FIXED_SIZE_BINARY), ree_filter, PrimitiveFilterExec}, + {InputType(Type::DECIMAL32), ree_filter, PrimitiveFilterExec}, + {InputType(Type::DECIMAL64), ree_filter, PrimitiveFilterExec}, {InputType(Type::DECIMAL128), ree_filter, PrimitiveFilterExec}, {InputType(Type::DECIMAL256), ree_filter, PrimitiveFilterExec}, {InputType(Type::DICTIONARY), ree_filter, DictionaryFilterExec}, diff --git a/r/NAMESPACE b/r/NAMESPACE index 412d70ed22c..cdeb27c4067 100644 --- a/r/NAMESPACE +++ b/r/NAMESPACE @@ -319,6 +319,8 @@ export(date64) export(decimal) export(decimal128) export(decimal256) +export(decimal32) +export(decimal64) export(default_memory_pool) export(dictionary) export(duration) diff --git a/r/R/arrowExports.R b/r/R/arrowExports.R index 4ed612fc734..a988cfb4af7 100644 --- a/r/R/arrowExports.R +++ b/r/R/arrowExports.R @@ -924,6 +924,14 @@ Null__initialize <- function() { .Call(`_arrow_Null__initialize`) } +Decimal32Type__initialize <- function(precision, scale) { + .Call(`_arrow_Decimal32Type__initialize`, precision, scale) +} + +Decimal64Type__initialize <- function(precision, scale) { + .Call(`_arrow_Decimal64Type__initialize`, precision, scale) +} + Decimal128Type__initialize <- function(precision, scale) { .Call(`_arrow_Decimal128Type__initialize`, precision, scale) } diff --git a/r/R/dplyr-funcs-simple.R b/r/R/dplyr-funcs-simple.R index 05780721f24..ac8a7a46ca4 100644 --- a/r/R/dplyr-funcs-simple.R +++ b/r/R/dplyr-funcs-simple.R @@ -190,7 +190,7 @@ common_type <- function(exprs) { cast_or_parse <- function(x, type) { to_type_id <- type$id - if (to_type_id %in% 
c(Type[["DECIMAL128"]], Type[["DECIMAL256"]])) { + if (to_type_id %in% c(Type[["DECIMAL32"]], Type[["DECIMAL64"]], Type[["DECIMAL128"]], Type[["DECIMAL256"]])) { # TODO: determine the minimum size of decimal (or integer) required to # accommodate x # We would like to keep calculations on decimal if that's what the data has diff --git a/r/R/dplyr-funcs-type.R b/r/R/dplyr-funcs-type.R index 85c26ec05c8..1b42e52baac 100644 --- a/r/R/dplyr-funcs-type.R +++ b/r/R/dplyr-funcs-type.R @@ -186,7 +186,7 @@ register_bindings_type_inspect <- function() { is.numeric(x) || (inherits(x, "Expression") && x$type_id() %in% Type[c( "UINT8", "INT8", "UINT16", "INT16", "UINT32", "INT32", "UINT64", "INT64", "HALF_FLOAT", "FLOAT", "DOUBLE", - "DECIMAL128", "DECIMAL256" + "DECIMAL32", "DECIMAL64", "DECIMAL128", "DECIMAL256" )]) }) register_binding("base::is.double", function(x) { diff --git a/r/R/enums.R b/r/R/enums.R index 98995b2a2e7..a28728552f8 100644 --- a/r/R/enums.R +++ b/r/R/enums.R @@ -80,7 +80,13 @@ Type <- enum("Type::type", LARGE_BINARY = 35L, LARGE_LIST = 36L, INTERVAL_MONTH_DAY_NANO = 37L, - RUN_END_ENCODED = 38L + RUN_END_ENCODED = 38L, + STRING_VIEW = 39L, + BINARY_VIEW = 40L, + LIST_VIEW = 41L, + LARGE_LIST_VIEW = 42L, + DECIMAL32 = 43L, + DECIMAL64 = 44L ) TYPES_WITH_NAN <- Type[c("HALF_FLOAT", "FLOAT", "DOUBLE")] @@ -88,9 +94,9 @@ TYPES_NUMERIC <- Type[ c( "INT8", "UINT8", "INT16", "UINT16", "INT32", "UINT32", "INT64", "UINT64", "HALF_FLOAT", "FLOAT", "DOUBLE", - "DECIMAL128", "DECIMAL256" - ) - ] + "DECIMAL32", "DECIMAL64", "DECIMAL128", "DECIMAL256" + ) +] #' @rdname enums #' @export diff --git a/r/R/type.R b/r/R/type.R index d6db6f146ed..1552d1dfa58 100644 --- a/r/R/type.R +++ b/r/R/type.R @@ -300,6 +300,10 @@ DecimalType <- R6Class("DecimalType", ) ) +Decimal32Type <- R6Class("Decimal32Type", inherit = DecimalType) + +Decimal64Type <- R6Class("Decimal64Type", inherit = DecimalType) + Decimal128Type <- R6Class("Decimal128Type", inherit = DecimalType) Decimal256Type <- R6Class("Decimal256Type", inherit = DecimalType) @@ -586,11 +590,29 @@ decimal <- function(precision, scale) { if (args$precision > 38) { decimal256(args$precision, args$scale) - } else { + } else if (args$precision > 18) { decimal128(args$precision, args$scale) + } else if (args$precision > 9) { + decimal64(args$precision, args$scale) + } else { + decimal32(args$precision, args$scale) } } +#' @rdname data-type +#' @export +decimal32 <- function(precision, scale) { + args <- check_decimal_args(precision, scale) + Decimal32Type__initialize(args$precision, args$scale) +} + +#' @rdname data-type +#' @export +decimal64 <- function(precision, scale) { + args <- check_decimal_args(precision, scale) + Decimal64Type__initialize(args$precision, args$scale) +} + #' @rdname data-type #' @export decimal128 <- function(precision, scale) { @@ -768,6 +790,8 @@ canonical_type_str <- function(type_str) { time64 = "time64", null = "null", timestamp = "timestamp", + decimal32 = "decimal32", + decimal64 = "decimal64", decimal128 = "decimal128", decimal256 = "decimal256", struct = "struct", diff --git a/r/man/data-type.Rd b/r/man/data-type.Rd index 214e8ddc1f6..4a410e5cb45 100644 --- a/r/man/data-type.Rd +++ b/r/man/data-type.Rd @@ -31,6 +31,8 @@ \alias{null} \alias{timestamp} \alias{decimal} +\alias{decimal32} +\alias{decimal64} \alias{decimal128} \alias{decimal256} \alias{struct} @@ -100,6 +102,10 @@ timestamp(unit = c("s", "ms", "us", "ns"), timezone = "") decimal(precision, scale) +decimal32(precision, scale) + +decimal64(precision, scale) + 
decimal128(precision, scale) decimal256(precision, scale) diff --git a/r/src/array_to_vector.cpp b/r/src/array_to_vector.cpp index 2f0508eb7a4..d27e1b93a93 100644 --- a/r/src/array_to_vector.cpp +++ b/r/src/array_to_vector.cpp @@ -1313,6 +1313,12 @@ std::shared_ptr Converter::Make( return std::make_shared(chunked_array); } + case Type::DECIMAL32: + return std::make_shared>(chunked_array); + + case Type::DECIMAL64: + return std::make_shared>(chunked_array); + case Type::DECIMAL128: return std::make_shared>(chunked_array); diff --git a/r/src/arrowExports.cpp b/r/src/arrowExports.cpp index c71d1c77305..c31cc6dc9c1 100644 --- a/r/src/arrowExports.cpp +++ b/r/src/arrowExports.cpp @@ -2551,6 +2551,24 @@ BEGIN_CPP11 END_CPP11 } // datatype.cpp +std::shared_ptr Decimal32Type__initialize(int32_t precision, int32_t scale); +extern "C" SEXP _arrow_Decimal32Type__initialize(SEXP precision_sexp, SEXP scale_sexp){ +BEGIN_CPP11 + arrow::r::Input::type precision(precision_sexp); + arrow::r::Input::type scale(scale_sexp); + return cpp11::as_sexp(Decimal32Type__initialize(precision, scale)); +END_CPP11 +} +// datatype.cpp +std::shared_ptr Decimal64Type__initialize(int32_t precision, int32_t scale); +extern "C" SEXP _arrow_Decimal64Type__initialize(SEXP precision_sexp, SEXP scale_sexp){ +BEGIN_CPP11 + arrow::r::Input::type precision(precision_sexp); + arrow::r::Input::type scale(scale_sexp); + return cpp11::as_sexp(Decimal64Type__initialize(precision, scale)); +END_CPP11 +} +// datatype.cpp std::shared_ptr Decimal128Type__initialize(int32_t precision, int32_t scale); extern "C" SEXP _arrow_Decimal128Type__initialize(SEXP precision_sexp, SEXP scale_sexp){ BEGIN_CPP11 @@ -5910,6 +5928,8 @@ static const R_CallMethodDef CallEntries[] = { { "_arrow_Date32__initialize", (DL_FUNC) &_arrow_Date32__initialize, 0}, { "_arrow_Date64__initialize", (DL_FUNC) &_arrow_Date64__initialize, 0}, { "_arrow_Null__initialize", (DL_FUNC) &_arrow_Null__initialize, 0}, + { "_arrow_Decimal32Type__initialize", (DL_FUNC) &_arrow_Decimal32Type__initialize, 2}, + { "_arrow_Decimal64Type__initialize", (DL_FUNC) &_arrow_Decimal64Type__initialize, 2}, { "_arrow_Decimal128Type__initialize", (DL_FUNC) &_arrow_Decimal128Type__initialize, 2}, { "_arrow_Decimal256Type__initialize", (DL_FUNC) &_arrow_Decimal256Type__initialize, 2}, { "_arrow_DayTimeInterval__initialize", (DL_FUNC) &_arrow_DayTimeInterval__initialize, 0}, diff --git a/r/src/datatype.cpp b/r/src/datatype.cpp index 2f2b89d658d..ea407fc7776 100644 --- a/r/src/datatype.cpp +++ b/r/src/datatype.cpp @@ -81,6 +81,10 @@ const char* r6_class_name::get( case Type::DURATION: return "DurationType"; + case Type::DECIMAL32: + return "Decimal32Type"; + case Type::DECIMAL64: + return "Decimal64Type"; case Type::DECIMAL128: return "Decimal128Type"; case Type::DECIMAL256: @@ -181,6 +185,20 @@ std::shared_ptr Date64__initialize() { return arrow::date64(); // [[arrow::export]] std::shared_ptr Null__initialize() { return arrow::null(); } +// [[arrow::export]] +std::shared_ptr Decimal32Type__initialize(int32_t precision, + int32_t scale) { + // Use the builder that validates inputs + return ValueOrStop(arrow::Decimal32Type::Make(precision, scale)); +} + +// [[arrow::export]] +std::shared_ptr Decimal64Type__initialize(int32_t precision, + int32_t scale) { + // Use the builder that validates inputs + return ValueOrStop(arrow::Decimal64Type::Make(precision, scale)); +} + // [[arrow::export]] std::shared_ptr Decimal128Type__initialize(int32_t precision, int32_t scale) { diff --git 
a/r/tests/testthat/test-Array.R b/r/tests/testthat/test-Array.R index ecb0f65dd75..cde6877c4da 100644 --- a/r/tests/testthat/test-Array.R +++ b/r/tests/testthat/test-Array.R @@ -1321,7 +1321,14 @@ test_that("Array to C-interface", { }) test_that("Can convert R integer/double to decimal (ARROW-11631)", { - # Check both decimal128 and decimal256 + # Check all of decimal32, decimal64, decimal128 and decimal256 + + + decimal32_from_dbl <- arrow_array(c(1, NA_real_), type = decimal32(9, 2)) + decimal64_from_dbl <- arrow_array(c(1, NA_real_), type = decimal64(12, 2)) + decimal32_from_int <- arrow_array(c(1L, NA_integer_), type = decimal32(9, 2)) + decimal64_from_int <- arrow_array(c(1L, NA_integer_), type = decimal64(12, 2)) + decimal128_from_dbl <- arrow_array(c(1, NA_real_), type = decimal128(12, 2)) decimal256_from_dbl <- arrow_array(c(1, NA_real_), type = decimal256(12, 2)) decimal128_from_int <- arrow_array(c(1L, NA_integer_), type = decimal128(12, 2)) @@ -1333,6 +1340,16 @@ test_that("Can convert R integer/double to decimal (ARROW-11631)", { decimal_from_altrep_dbl <- arrow_array(altrep_dbl, type = decimal128(12, 2)) decimal_from_altrep_int <- arrow_array(altrep_int, type = decimal128(12, 2)) + expect_equal( + decimal32_from_dbl, + arrow_array(c(1, NA))$cast(decimal32(9, 2)) + ) + + expect_equal( + decimal64_from_dbl, + arrow_array(c(1, NA))$cast(decimal64(12, 2)) + ) + expect_equal( decimal128_from_dbl, arrow_array(c(1, NA))$cast(decimal128(12, 2)) @@ -1343,6 +1360,17 @@ test_that("Can convert R integer/double to decimal (ARROW-11631)", { arrow_array(c(1, NA))$cast(decimal256(12, 2)) ) + expect_equal( + decimal32_from_int, + arrow_array(c(1, NA))$cast(decimal32(9, 2)) + ) + + expect_equal( + decimal64_from_int, + arrow_array(c(1, NA))$cast(decimal64(12, 2)) + ) + + expect_equal( decimal128_from_int, arrow_array(c(1, NA))$cast(decimal128(12, 2)) diff --git a/r/tests/testthat/test-chunked-array.R b/r/tests/testthat/test-chunked-array.R index 4ee71260799..560b2459790 100644 --- a/r/tests/testthat/test-chunked-array.R +++ b/r/tests/testthat/test-chunked-array.R @@ -248,6 +248,7 @@ test_that("ChunkedArray supports empty arrays (ARROW-13761)", { int8(), int16(), int32(), int64(), uint8(), uint16(), uint32(), uint64(), float32(), float64(), timestamp("ns"), binary(), large_binary(), fixed_size_binary(32), date32(), date64(), + decimal32(4, 2), decimal64(4, 2), decimal128(4, 2), decimal256(4, 2), dictionary(), struct(x = int32()) ) diff --git a/r/tests/testthat/test-data-type.R b/r/tests/testthat/test-data-type.R index e7212eb61b5..fa2e5bcd6e8 100644 --- a/r/tests/testthat/test-data-type.R +++ b/r/tests/testthat/test-data-type.R @@ -474,16 +474,15 @@ test_that("DictionaryType validation", { }) test_that("decimal type and validation", { - expect_r6_class(decimal(4, 2), "Decimal128Type") + expect_r6_class(decimal(4, 2), "Decimal32Type") + expect_r6_class(decimal(14, 2), "Decimal64Type") + expect_r6_class(decimal(22, 2), "Decimal128Type") expect_r6_class(decimal(39, 2), "Decimal256Type") expect_error(decimal("four"), "`precision` must be an integer") expect_error(decimal(4, "two"), "`scale` must be an integer") expect_error(decimal(NA, 2), "`precision` must be an integer") expect_error(decimal(4, NA), "`scale` must be an integer") - # TODO remove precision range tests below once functionality is tested in C++ (ARROW-15162) - expect_error(decimal(0, 2), "Invalid: Decimal precision out of range [1, 38]: 0", fixed = TRUE) - expect_error(decimal(100, 2), "Invalid: Decimal precision out of range [1, 76]: 
100", fixed = TRUE) # decimal() creates either decimal128 or decimal256 based on precision expect_identical(class(decimal(38, 2)), class(decimal128(38, 2))) @@ -497,10 +496,6 @@ test_that("decimal type and validation", { expect_error(decimal128(4, NA), "`scale` must be an integer") expect_error(decimal128(3:4, NA), "`precision` must have size 1. not size 2") expect_error(decimal128(4, 2:3), "`scale` must have size 1. not size 2") - # TODO remove precision range tests below once functionality is tested in C++ (ARROW-15162) - expect_error(decimal128(0, 2), "Invalid: Decimal precision out of range [1, 38]: 0", fixed = TRUE) - expect_error(decimal128(100, 2), "Invalid: Decimal precision out of range [1, 38]: 100", fixed = TRUE) - expect_r6_class(decimal256(4, 2), "Decimal256Type") @@ -510,9 +505,6 @@ test_that("decimal type and validation", { expect_error(decimal256(4, NA), "`scale` must be an integer") expect_error(decimal256(3:4, NA), "`precision` must have size 1. not size 2") expect_error(decimal256(4, 2:3), "`scale` must have size 1. not size 2") - # TODO remove precision range tests below once functionality is tested in C++ (ARROW-15162) - expect_error(decimal256(0, 2), "Invalid: Decimal precision out of range [1, 76]: 0", fixed = TRUE) - expect_error(decimal256(100, 2), "Invalid: Decimal precision out of range [1, 76]: 100", fixed = TRUE) }) test_that("Binary", { diff --git a/r/tests/testthat/test-dplyr-funcs-type.R b/r/tests/testthat/test-dplyr-funcs-type.R index 08fa6ddc64c..dbbd2ab206f 100644 --- a/r/tests/testthat/test-dplyr-funcs-type.R +++ b/r/tests/testthat/test-dplyr-funcs-type.R @@ -249,57 +249,131 @@ test_that("is.na() evaluates to TRUE on NaN (ARROW-12055)", { test_that("type checks with is() giving Arrow types", { # with class2=DataType + extract_logicals <- function(x) { + x %>% + collect() %>% + t() %>% + as.vector() + } + expect_equal( - Table$create( - i32 = Array$create(1, int32()), - dec = Array$create(pi)$cast(decimal(3, 2)), - dec128 = Array$create(pi)$cast(decimal128(3, 2)), - dec256 = Array$create(pi)$cast(decimal256(3, 2)), - f64 = Array$create(1.1, float64()), - str = Array$create("a", arrow::string()) - ) %>% + Table$create(i32 = Array$create(1, int32())) %>% transmute( i32_is_i32 = is(i32, int32()), i32_is_dec = is(i32, decimal(3, 2)), + i32_is_dec32 = is(i32, decimal32(3, 2)), + i32_is_dec64 = is(i32, decimal64(3, 2)), i32_is_dec128 = is(i32, decimal128(3, 2)), i32_is_dec256 = is(i32, decimal256(3, 2)), i32_is_f64 = is(i32, float64()), - i32_is_str = is(i32, string()), + i32_is_str = is(i32, string()) + ) %>% + extract_logicals(), + c(TRUE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE) + ) + + expect_equal( + Table$create(dec = Array$create(pi)$cast(decimal(3, 2))) %>% + transmute( dec_is_i32 = is(dec, int32()), dec_is_dec = is(dec, decimal(3, 2)), + dec_is_dec32 = is(dec, decimal32(3, 2)), + dec_is_dec64 = is(dec, decimal64(3, 2)), dec_is_dec128 = is(dec, decimal128(3, 2)), dec_is_dec256 = is(dec, decimal256(3, 2)), dec_is_f64 = is(dec, float64()), - dec_is_str = is(dec, string()), + dec_is_str = is(dec, string()) + ) %>% + extract_logicals(), + c(FALSE, TRUE, TRUE, FALSE, FALSE, FALSE, FALSE, FALSE) + ) + + expect_equal( + Table$create(dec32 = Array$create(pi)$cast(decimal32(3, 2))) %>% + transmute( + dec32_is_i32 = is(dec32, int32()), + dec32_is_dec32 = is(dec32, decimal32(3, 2)), + dec32_is_dec64 = is(dec32, decimal64(3, 2)), + dec32_is_dec128 = is(dec32, decimal128(3, 2)), + dec32_is_dec256 = is(dec32, decimal256(3, 2)), + dec32_is_f64 = is(dec32, 
float64()), + dec32_is_str = is(dec32, string()) + ) %>% + extract_logicals(), + c(FALSE, TRUE, FALSE, FALSE, FALSE, FALSE, FALSE) + ) + + expect_equal( + Table$create(dec64 = Array$create(pi)$cast(decimal64(3, 2))) %>% + transmute( + dec64_is_i32 = is(dec64, int32()), + dec64_is_dec32 = is(dec64, decimal32(3, 2)), + dec64_is_dec64 = is(dec64, decimal64(3, 2)), + dec64_is_dec128 = is(dec64, decimal128(3, 2)), + dec64_is_dec256 = is(dec64, decimal256(3, 2)), + dec64_is_f64 = is(dec64, float64()), + dec64_is_str = is(dec64, string()) + ) %>% + extract_logicals(), + c(FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, FALSE) + ) + + expect_equal( + Table$create(dec128 = Array$create(pi)$cast(decimal128(3, 2))) %>% + transmute( dec128_is_i32 = is(dec128, int32()), + dec128_is_dec32 = is(dec128, decimal32(3, 2)), + dec128_is_dec64 = is(dec128, decimal64(3, 2)), dec128_is_dec128 = is(dec128, decimal128(3, 2)), dec128_is_dec256 = is(dec128, decimal256(3, 2)), dec128_is_f64 = is(dec128, float64()), - dec128_is_str = is(dec128, string()), - dec256_is_i32 = is(dec128, int32()), - dec256_is_dec128 = is(dec128, decimal128(3, 2)), - dec256_is_dec256 = is(dec128, decimal256(3, 2)), - dec256_is_f64 = is(dec128, float64()), - dec256_is_str = is(dec128, string()), + dec128_is_str = is(dec128, string()) + ) %>% + extract_logicals(), + c(FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE) + ) + + expect_equal( + Table$create(dec256 = Array$create(pi)$cast(decimal256(3, 2))) %>% + transmute( + dec256_is_i32 = is(dec256, int32()), + dec256_is_dec32 = is(dec256, decimal32(3, 2)), + dec256_is_dec64 = is(dec256, decimal64(3, 2)), + dec256_is_dec128 = is(dec256, decimal128(3, 2)), + dec256_is_dec256 = is(dec256, decimal256(3, 2)), + dec256_is_f64 = is(dec256, float64()), + dec256_is_str = is(dec256, string()) + ) %>% + extract_logicals(), + c(FALSE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE) + ) + + expect_equal( + Table$create( + f64 = Array$create(1.1, float64()), + str = Array$create("a", arrow::string()) + ) %>% + transmute( f64_is_i32 = is(f64, int32()), f64_is_dec = is(f64, decimal(3, 2)), + f64_is_dec32 = is(f64, decimal32(3, 2)), + f64_is_dec64 = is(f64, decimal64(3, 2)), f64_is_dec128 = is(f64, decimal128(3, 2)), f64_is_dec256 = is(f64, decimal256(3, 2)), f64_is_f64 = is(f64, float64()), f64_is_str = is(f64, string()), str_is_i32 = is(str, int32()), + str_is_dec32 = is(str, decimal32(3, 2)), + str_is_dec64 = is(str, decimal64(3, 2)), str_is_dec128 = is(str, decimal128(3, 2)), str_is_dec256 = is(str, decimal256(3, 2)), str_is_i64 = is(str, float64()), str_is_str = is(str, string()) ) %>% - collect() %>% - t() %>% - as.vector(), + extract_logicals(), c( - TRUE, FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, TRUE, FALSE, FALSE, - FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, - FALSE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE + FALSE, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, + FALSE, FALSE, FALSE, FALSE, FALSE, TRUE ) ) # with class2=string @@ -322,9 +396,7 @@ test_that("type checks with is() giving Arrow types", { str_is_i64 = is(str, "double"), str_is_str = is(str, "string") ) %>% - collect() %>% - t() %>% - as.vector(), + extract_logicals(), c(TRUE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, TRUE) ) # with class2=string alias @@ -362,9 +434,7 @@ test_that("type checks with is() giving Arrow types", { str_is_lgl = is(str, "boolean"), str_is_str = is(str, "utf8") ) %>% - collect() %>% - t() %>% - as.vector(), + extract_logicals(), 
c( TRUE, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, FALSE, FALSE, TRUE, FALSE, FALSE, FALSE, diff --git a/r/tests/testthat/test-type.R b/r/tests/testthat/test-type.R index 4f6210c29c1..b01af80dc67 100644 --- a/r/tests/testthat/test-type.R +++ b/r/tests/testthat/test-type.R @@ -200,9 +200,17 @@ test_that("Type strings are correctly canonicalized", { canonical_type_str("timestamp"), sub("^([^([<]+).*$", "\\1", timestamp()$ToString()) ) + expect_equal( + canonical_type_str("decimal32"), + sub("^([^([<]+).*$", "\\1", decimal32(3, 2)$ToString()) + ) + expect_equal( + canonical_type_str("decimal64"), + sub("^([^([<]+).*$", "\\1", decimal64(3, 2)$ToString()) + ) expect_equal( canonical_type_str("decimal128"), - sub("^([^([<]+).*$", "\\1", decimal(3, 2)$ToString()) + sub("^([^([<]+).*$", "\\1", decimal(31, 2)$ToString()) ) expect_equal( canonical_type_str("decimal128"), diff --git a/r/vignettes/data_types.Rmd b/r/vignettes/data_types.Rmd index 4b5ee01b6ab..d5c70a8f02d 100644 --- a/r/vignettes/data_types.Rmd +++ b/r/vignettes/data_types.Rmd @@ -6,11 +6,16 @@ description: > output: rmarkdown::html_vignette --- -Arrow has a rich data type system that includes direct analogs of many R data types, and many data types that do not have a counterpart in R. This article describes the Arrow type system, compares it to R data types, and outlines the default mappings used when data are transferred from Arrow to R. At the end of the article there are two lookup tables: one describing the default "R to Arrow" type mappings and the other describing the "Arrow to R" mappings. +Arrow has a rich data type system that includes direct analogs of many R data types, and many data types that do not have a counterpart +in R. This article describes the Arrow type system, compares it to R data types, and outlines the default mappings used when data are + transferred from Arrow to R. At the end of the article there are two lookup tables: one describing the default "R to Arrow" type mappings + and the other describing the "Arrow to R" mappings. ## Motivating example -To illustrate the conversion that needs to take place, consider the differences between the output when obtain we use `dplyr::glimpse()` to inspect the `starwars` data in its original format -- as a data frame in R -- and the output we obtain when we convert it to an Arrow Table first by calling `arrow_table()`: +To illustrate the conversion that needs to take place, consider the differences between the output we obtain when we use `dplyr::glimpse()` + to inspect the `starwars` data in its original format -- as a data frame in R -- and the output we obtain when we convert it to an Arrow + Table first by calling `arrow_table()`: ```{r} library(dplyr, warn.conflicts = FALSE) @@ -22,33 +27,51 @@ glimpse(arrow_table(starwars)) The data represented are essentially the same, but the descriptions of the data types for the columns have changed. 
For example: -- `name` is labelled `` (character vector) in the data frame; it is labelled `` (a string type, also referred to as utf8 type) in the Arrow Table +- `name` is labelled `` (character vector) in the data frame; it is labelled `` (a string type, also referred to as utf8 +type) in the Arrow Table - `height` is labelled `` (integer vector) in the data frame; it is labelled `` (32 bit signed integer) in the Arrow Table - `mass` is labelled `` (numeric vector) in the data frame; it is labelled `` (64 bit floating point number) in the Arrow Table -Some of these differences are purely cosmetic: integers in R are in fact 32 bit signed integers, so the underlying data types in Arrow and R are direct analogs of one another. In other cases the differences are purely about the implementation: Arrow and R have different ways to store a vector of strings, but at a high level of abstraction the R character type and the Arrow string type can be viewed as direct analogs. In some cases, however, there are no clear analogs: while Arrow has an analog of POSIXct (the timestamp type) it does not have an analog of POSIXlt; conversely, while R can represent 32 bit signed integers, it does not have an equivalent of a 64 bit unsigned integer. +Some of these differences are purely cosmetic: integers in R are in fact 32 bit signed integers, so the underlying data types in Arrow +and R are direct analogs of one another. In other cases the differences are purely about the implementation: Arrow and R have different + ways to store a vector of strings, but at a high level of abstraction the R character type and the Arrow string type can be viewed as + direct analogs. In some cases, however, there are no clear analogs: while Arrow has an analog of POSIXct (the timestamp type) it does + not have an analog of POSIXlt; conversely, while R can represent 32 bit signed integers, it does not have an equivalent of a 64 bit + unsigned integer. -When the arrow package converts between R data and Arrow data, it will first check to see if a Schema has been provided -- see `schema()` for more information -- and if none is available it will attempt to guess the appropriate type by following the default mappings. A complete listing of these mappings is provided at the end of the article, but the most common cases are depicted in the illustration below: +When the arrow package converts between R data and Arrow data, it will first check to see if a Schema has been provided -- see +`schema()` for more information -- and if none is available it will attempt to guess the appropriate type by following the default +mappings. A complete listing of these mappings is provided at the end of the article, but the most common cases are depicted in +the illustration below: ```{r, echo=FALSE, out.width="100%"} knitr::include_graphics("./data_types.png") ``` -In this image, black boxes refer to R data types and light blue boxes refer to Arrow data types. Directional arrows specify conversions (e.g., the bidirectional arrow between the logical R type and the boolean Arrow type means that the logical R converts to an Arrow boolean and vice versa). Solid lines indicate that this conversion rule is always the default; dashed lines mean that it only sometimes applies (the rules and special cases are described below). +In this image, black boxes refer to R data types and light blue boxes refer to Arrow data types. 
Directional arrows specify +conversions (e.g., the bidirectional arrow between the logical R type and the boolean Arrow type means that the logical R +converts to an Arrow boolean and vice versa). Solid lines indicate that this conversion rule is always the default; dashed lines +mean that it only sometimes applies (the rules and special cases are described below). ## Logical/boolean types -Arrow and R both use three-valued logic. In R, logical values can be `TRUE` or `FALSE`, with `NA` used to represent missing data. In Arrow, the corresponding boolean type can take values `true`, `false`, or `null`, as shown below: +Arrow and R both use three-valued logic. In R, logical values can be `TRUE` or `FALSE`, with `NA` used to represent missing data. +In Arrow, the corresponding boolean type can take values `true`, `false`, or `null`, as shown below: ```{r} chunked_array(c(TRUE, FALSE, NA), type = boolean()) # default ``` -It is not strictly necessary to set `type = boolean()` in this example because the default behavior in arrow is to translate R logical vectors to Arrow booleans and vice versa. However, for the sake of clarity we will specify the data types explicitly throughout this article. We will likewise use `chunked_array()` to create Arrow data from R objects and `as.vector()` to create R data from Arrow objects, but similar results are obtained if we use other methods. +It is not strictly necessary to set `type = boolean()` in this example because the default behavior in arrow is to translate R + logical vectors to Arrow booleans and vice versa. However, for the sake of clarity we will specify the data types explicitly + throughout this article. We will likewise use `chunked_array()` to create Arrow data from R objects and `as.vector()` to create + R data from Arrow objects, but similar results are obtained if we use other methods. ## Integer types -Base R natively supports only one type of integer, using 32 bits to represent signed numbers between -2147483648 and 2147483647, though R can also support 64 bit integers via the [`bit64`](https://cran.r-project.org/package=bit64) package. Arrow inherits signed and unsigned integer types from C++ in 8 bit, 16 bit, 32 bit, and 64 bit versions: +Base R natively supports only one type of integer, using 32 bits to represent signed numbers between -2147483648 and 2147483647, + though R can also support 64 bit integers via the [`bit64`](https://cran.r-project.org/package=bit64) package. 
Arrow inherits + signed and unsigned integer types from C++ in 8 bit, 16 bit, 32 bit, and 64 bit versions: | Description | Data Type Function | Smallest Value | Largest Value | | --------------- | -----------------: | -------------------: | -------------------: | @@ -78,11 +101,14 @@ When translating from Arrow to R, integer types alway translate to R integers un - If the value of an Arrow uint32 or uint64 falls outside the range allowed for R integers, the result will be a numeric vector in R - If the value of an Arrow int64 variable falls outside the range allowed for R integers, the result will be a `bit64::integer64` vector in R -- If the user sets `options(arrow.int64_downcast = FALSE)`, the Arrow int64 type always yields a `bit64::integer64` vector in R regardless of the value +- If the user sets `options(arrow.int64_downcast = FALSE)`, the Arrow int64 type always yields a `bit64::integer64` vector in R + regardless of the value
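To see the last of these rules in action, here is a minimal sketch (it assumes the `bit64` package is installed; `small_int64` is just an illustrative name):

```{r}
small_int64 <- chunked_array(1L, type = int64())

# the value fits in an R integer, so by default it is downcast
class(as.vector(small_int64))

# with downcasting disabled, an int64 always becomes a bit64::integer64
options(arrow.int64_downcast = FALSE)
class(as.vector(small_int64))

options(arrow.int64_downcast = TRUE) # restore the default
```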
## Floating point numeric types -R has one double-precision (64 bit) numeric type, which translates to the Arrow 64 bit floating point type by default. Arrow supports both single-precision (32 bit) and double-precision (64 bit) floating point numbers, specified using the `float32()` and `float64()` data type functions. Both of these are translated to doubles in R. Examples are shown below: +R has one double-precision (64 bit) numeric type, which translates to the Arrow 64 bit floating point type by default. Arrow supports + both single-precision (32 bit) and double-precision (64 bit) floating point numbers, specified using the `float32()` and `float64()` + data type functions. Both of these are translated to doubles in R. Examples are shown below: ```{r} chunked_array(c(0.1, 0.2, 0.3), type = float64()) # default @@ -96,14 +122,22 @@ Note that the Arrow specification also permits half-precision (16 bit) floating ## Fixed point decimal types -Arrow also contains `decimal()` data types, in which numeric values are specified in decimal format rather than binary. Decimals in Arrow come in two varieties, a 128 bit version and a 256 bit version, but in most cases users should be able to use the more general `decimal()` data type function rather than the specific `decimal128()` and `decimal256()` functions. +Arrow also contains `decimal()` data types, in which numeric values are specified in decimal format rather than binary. +Decimals in Arrow come in four varieties: a 32 bit, a 64 bit, a 128 bit, and a 256 bit version, but in most cases users should be able +to use the more general `decimal()` data type function rather than the specific `decimal32()`, `decimal64()`, `decimal128()`, and `decimal256()` functions. -The decimal types in Arrow are fixed-precision numbers (rather than floating-point), which means it is necessary to explicitly specify the `precision` and `scale` arguments: +The decimal types in Arrow are fixed-precision numbers (rather than floating-point), which means it is necessary to explicitly +specify the `precision` and `scale` arguments: - `precision` specifies the number of significant digits to store. -- `scale` specifies the number of digits that should be stored after the decimal point. If you set `scale = 2`, exactly two digits will be stored after the decimal point. If you set `scale = 0`, values will be rounded to the nearest whole number. Negative scales are also permitted (handy when dealing with extremely large numbers), so `scale = -2` stores the value to the nearest 100. +- `scale` specifies the number of digits that should be stored after the decimal point. If you set `scale = 2`, exactly two +digits will be stored after the decimal point. If you set `scale = 0`, values will be rounded to the nearest whole number. +Negative scales are also permitted (handy when dealing with extremely large numbers), so `scale = -2` stores the value to the nearest 100. -Because R does not have any way to create decimal types natively, the example below is a little circuitous. First we create some floating point numbers as Chunked Arrays, and then explicitly cast these to decimal types within Arrow. This is possible because Chunked Array objects possess a `cast()` method: +Because R does not have any way to create decimal types natively, the example below is a little circuitous. First we create +some floating point numbers as Chunked Arrays, and then explicitly cast these to decimal types within Arrow. +This is possible because Chunked Array objects possess a `cast()` method: ```{r} arrow_floating <- chunked_array(c(.01, .1, 1, 10, 100)) arrow_decimals <- arrow_floating$cast(decimal(precision = 5, scale = 2)) arrow_decimals ```
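With this change, `decimal()` simply selects the narrowest decimal variant whose precision range can accommodate the requested `precision`, so the width-specific constructors are rarely needed. A minimal sketch of the dispatch (the thresholds mirror the `decimal()` implementation in r/R/type.R above, and the class names are those registered in this patch):

```{r}
class(decimal(5, 2))[[1]]  # "Decimal32Type":  precision <= 9
class(decimal(14, 2))[[1]] # "Decimal64Type":  10 to 18 significant digits
class(decimal(22, 2))[[1]] # "Decimal128Type": 19 to 38 significant digits
class(decimal(39, 2))[[1]] # "Decimal256Type": more than 38 significant digits
```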
-Though not natively used in R, decimal types can be useful in situations where it is especially important to avoid problems that arise in floating point arithmetic. +Though not natively used in R, decimal types can be useful in situations where it is especially important to avoid problems that arise + in floating point arithmetic. ## String/character types -R uses a single character type to represent strings whereas Arrow has two types. In the Arrow C++ library these types are referred to as strings and large_strings, but to avoid ambiguity in the arrow R package they are defined using the `utf8()` and `large_utf8()` data type functions. The distinction between these two Arrow types is unlikely to be important for R users, though the difference is discussed in the article on [data object layout](./developers/data_object_layout.html). +R uses a single character type to represent strings whereas Arrow has two types. In the Arrow C++ library these types are referred to +as strings and large_strings, but to avoid ambiguity in the arrow R package they are defined using the `utf8()` and `large_utf8()` data + type functions. The distinction between these two Arrow types is unlikely to be important for R users, though the difference is discussed + in the article on [data object layout](./developers/data_object_layout.html). The default behavior is to translate R character vectors to the utf8/string type, and to translate both Arrow types to R character vectors: @@ -127,7 +165,8 @@ as.vector(strings) ## Factor/dictionary types -The analog of R factors in Arrow is the dictionary type. Factors translate to dictionaries and vice versa. To illustrate this, let's create a small factor object in R: +The analog of R factors in Arrow is the dictionary type. Factors translate to dictionaries and vice versa. To illustrate this, let's +create a small factor object in R: ```{r} fct <- factor(c("cat", "dog", "pig", "dog")) @@ -147,11 +186,15 @@ When translated back to R, we recover the original factor: as.vector(dict) ``` -Arrow dictionaries are slightly more flexible than R factors: values in a dictionary do not necessarily have to be strings, but labels in a factor do. As a consequence, non-string values in an Arrow dictionary are coerced to strings when translated to R. +Arrow dictionaries are slightly more flexible than R factors: values in a dictionary do not necessarily have to be strings, but labels +in a factor do. As a consequence, non-string values in an Arrow dictionary are coerced to strings when translated to R. ## Date types -In R, dates are typically represented using the Date class. Internally a Date object is a numeric type whose value counts the number of days since the beginning of the Unix epoch (1 January 1970). Arrow supplies two data types that can be used to represent dates: the date32 type and the date64 type. The date32 type is similar to the Date class in R: internally it stores a 32 bit integer that counts the number of days since 1 January 1970. The default in arrow is to translate R Date objects to Arrow date32 types: +In R, dates are typically represented using the Date class. Internally a Date object is a numeric type whose value counts the number +of days since the beginning of the Unix epoch (1 January 1970). Arrow supplies two data types that can be used to represent dates: + the date32 type and the date64 type. The date32 type is similar to the Date class in R: internally it stores a 32 bit integer that + counts the number of days since 1 January 1970. The default in arrow is to translate R Date objects to Arrow date32 types: ```{r} nirvana_album_dates <- as.Date(c("1989-06-15", "1991-09-24", "1993-09-13")) @@ -160,7 +203,8 @@ nirvana_32 <- chunked_array(nirvana_album_dates, type = date32()) # default nirvana_32 ``` -Arrow also supplies a higher-precision date64 type, in which the date is represented as a 64 bit integer that encodes the number of *milliseconds* since 1970-01-01 00:00 UTC: +Arrow also supplies a higher-precision date64 type, in which the date is represented as a 64 bit integer that encodes the number of + *milliseconds* since 1970-01-01 00:00 UTC: ```{r} nirvana_64 <- chunked_array(nirvana_album_dates, type = date64()) @@ -173,7 +217,8 @@ The translation from Arrow to R differs. Internally the date32 type is very simi class(as.vector(nirvana_32)) ``` -However, because date64 types are specified to millisecond-level precision, they are translated to R as POSIXct times to avoid the possibility of losing relevant information: +However, because date64 types are specified to millisecond-level precision, they are translated to R as POSIXct times to avoid the +possibility of losing relevant information: ```{r} class(as.vector(nirvana_64)) @@ -181,35 +226,45 @@ class(as.vector(nirvana_64)) ## Temporal/timestamp types -In R there are two classes used to represent date and time information, POSIXct and POSIXlt. Arrow only has one: the timestamp type. Arrow timestamps are loosely analogous to the POSIXct class. Internally, a POSIXct object represents the date with as a numeric variable that stores the number of seconds since 1970-01-01 00:00 UTC. Internally, an Arrow timestamp is a 64 bit integer counting the number of milliseconds since 1970-01-01 00:00 UTC. +In R there are two classes used to represent date and time information, POSIXct and POSIXlt. Arrow only has one: the timestamp type. +Arrow timestamps are loosely analogous to the POSIXct class. Internally, a POSIXct object represents the date as a numeric variable +that stores the number of seconds since 1970-01-01 00:00 UTC. Internally, an Arrow timestamp is a 64 bit integer counting the number of + milliseconds since 1970-01-01 00:00 UTC. -Arrow and R both support timezone information, but display it differently in the printed object. 
In R, local time is printed with the timezone name adjacent to it: +Arrow and R both support timezone information, but display it differently in the printed object. In R, local time is printed with the +timezone name adjacent to it: ```{r} sydney_newyear <- as.POSIXct("2000-01-01 00:01", tz = "Australia/Sydney") sydney_newyear ``` -When translated to Arrow, this POSIXct object becomes an Arrow timestamp object. When printed, however, the temporal instant is always displayed in UTC rather than local time: +When translated to Arrow, this POSIXct object becomes an Arrow timestamp object. When printed, however, the temporal instant is always +displayed in UTC rather than local time: ```{r} sydney_newyear_arrow <- chunked_array(sydney_newyear, type = timestamp()) sydney_newyear_arrow ``` -The timezone information is not lost, however, which we can easily see by translating the `sydney_newyear_arrow` object back to an R POSIXct object: +The timezone information is not lost, however, which we can easily see by translating the `sydney_newyear_arrow` object back to an + R POSIXct object: ```{r} as.vector(sydney_newyear_arrow) ``` -For POSIXlt objects the behaviour is different. Internally a POSIXlt object is a list specifying the "local time" in terms of a variety of human-relevant fields. There is no analogous class to this in Arrow, so the default behaviour is to translate it to an Arrow list. +For POSIXlt objects the behaviour is different. Internally a POSIXlt object is a list specifying the "local time" in terms of a +variety of human-relevant fields. There is no analogous class to this in Arrow, so the default behaviour is to translate it to an Arrow list. ## Time of day types -Base R does not have a class to represent the time of day independent of the date (i.e., it is not possible to specify "3pm" without referring to a specific day), but it can be done with the help of the [`hms`](https://hms.tidyverse.org/) package. Internally, hms objects are always stored as the number of seconds since 00:00:00. +Base R does not have a class to represent the time of day independent of the date (i.e., it is not possible to specify "3pm" without + referring to a specific day), but it can be done with the help of the [`hms`](https://hms.tidyverse.org/) package. Internally, + hms objects are always stored as the number of seconds since 00:00:00. -Arrow has two data types for this purposes. For time32 types, data are stored as a 32 bit integer that is interpreted either as the number of seconds or the number of milliseconds since 00:00:00. Note the difference between the following: +Arrow has two data types for this purpose. For time32 types, data are stored as a 32 bit integer that is interpreted either as the + number of seconds or the number of milliseconds since 00:00:00. Note the difference between the following: ```{r} time_of_day <- hms::hms(56, 34, 12) @@ -217,7 +272,8 @@ chunked_array(time_of_day, type = time32(unit = "s")) chunked_array(time_of_day, type = time32(unit = "ms")) ``` -A time64 object is similar, but stores the time of day using a 64 bit integer and can represent the time at higher precision. It is possible to choose microseconds (`unit = "us"`) or nanoseconds (`unit = "ns"`), as shown below: +A time64 object is similar, but stores the time of day using a 64 bit integer and can represent the time at higher precision. 
It is + possible to choose microseconds (`unit = "us"`) or nanoseconds (`unit = "ns"`), as shown below: ```{r} chunked_array(time_of_day, type = time64(unit = "us")) @@ -228,7 +284,9 @@ All versions of time32 and time64 objects in Arrow translate to hms times in R. ## Duration types -Lengths of time are represented as difftime objects in R. The analogous data type in Arrow is the duration type. A duration type is stored as a 64 bit integer, which can represent the number of seconds (the default, `unit = "s"`), milliseconds (`unit = "ms"`), microseconds (`unit = "us"`), or nanoseconds (`unit = "ns"`). To illustrate this we'll create a difftime in R corresponding to 278 seconds: +Lengths of time are represented as difftime objects in R. The analogous data type in Arrow is the duration type. A duration type +is stored as a 64 bit integer, which can represent the number of seconds (the default, `unit = "s"`), milliseconds (`unit = "ms"`), + microseconds (`unit = "us"`), or nanoseconds (`unit = "ns"`). To illustrate this we'll create a difftime in R corresponding to 278 seconds: ```{r} len <- as.difftime(278, unit = "secs") @@ -247,7 +305,8 @@ Regardless of the underlying unit, duration objects in Arrow translate to diffti ## List of default translations -The discussion above covers the most common cases. The two tables in this section provide a more complete list of how arrow translates between R data types and Arrow data types. In these table, entries with a `-` are not currently implemented. +The discussion above covers the most common cases. The two tables in this section provide a more complete list of how arrow + translates between R data types and Arrow data types. In these tables, entries with a `-` are not currently implemented. ### Translations from R to Arrow From d688760336bc2d178029e0b11d221d23ba62cc35 Mon Sep 17 00:00:00 2001 From: Hiroyuki Sato Date: Tue, 17 Jun 2025 11:01:53 +0900 Subject: [PATCH 60/63] GH-46773: [GLib] Add GArrowFixedSizeListDataType (#46774) ### Rationale for this change GLib should be able to use `arrow::FixedSizeListType`. ### What changes are included in this PR? Add `GArrowFixedSizeListDataType`. ### Are these changes tested? Yes. ### Are there any user-facing changes? Yes. 
* GitHub Issue: #46773 Lead-authored-by: Hiroyuki Sato Co-authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- c_glib/arrow-glib/basic-data-type.cpp | 3 + c_glib/arrow-glib/composite-data-type.cpp | 121 +++++++++++++++++- c_glib/arrow-glib/composite-data-type.h | 25 ++++ c_glib/test/test-fixed-size-list-data-type.rb | 61 +++++++++ 4 files changed, 204 insertions(+), 6 deletions(-) create mode 100644 c_glib/test/test-fixed-size-list-data-type.rb diff --git a/c_glib/arrow-glib/basic-data-type.cpp b/c_glib/arrow-glib/basic-data-type.cpp index 27bae3b220e..51fffb73693 100644 --- a/c_glib/arrow-glib/basic-data-type.cpp +++ b/c_glib/arrow-glib/basic-data-type.cpp @@ -2652,6 +2652,9 @@ garrow_data_type_new_raw(std::shared_ptr *arrow_data_type) } type = GARROW_TYPE_EXTENSION_DATA_TYPE; break; + case arrow::Type::type::FIXED_SIZE_LIST: + type = GARROW_TYPE_FIXED_SIZE_LIST_DATA_TYPE; + break; case arrow::Type::type::RUN_END_ENCODED: type = GARROW_TYPE_RUN_END_ENCODED_DATA_TYPE; break; diff --git a/c_glib/arrow-glib/composite-data-type.cpp b/c_glib/arrow-glib/composite-data-type.cpp index 8af1b0c8626..3c216867da2 100644 --- a/c_glib/arrow-glib/composite-data-type.cpp +++ b/c_glib/arrow-glib/composite-data-type.cpp @@ -51,6 +51,8 @@ G_BEGIN_DECLS * #GArrowDictionaryDataType is a class for dictionary data type. * * #GArrowRunEndEncodedDataType is a class for run end encoded data type. + * + * #GArrowFixedSizeListDataType is a class for fixed size list data type. */ G_DEFINE_TYPE(GArrowBaseListDataType, garrow_base_list_data_type, GARROW_TYPE_DATA_TYPE) @@ -65,6 +67,26 @@ garrow_base_list_data_type_class_init(GArrowBaseListDataTypeClass *klass) { } +/** + * garrow_base_list_data_type_get_field: + * @base_list_data_type: A #GArrowBaseListDataType. + * + * Returns: (transfer full): The field of value. + * + * Since: 21.0.0 + */ +GArrowField * +garrow_base_list_data_type_get_field(GArrowBaseListDataType *base_list_data_type) +{ + auto data_type = GARROW_DATA_TYPE(base_list_data_type); + auto arrow_data_type = garrow_data_type_get_raw(data_type); + auto arrow_base_list_data_type = + std::static_pointer_cast(arrow_data_type); + + auto arrow_field = arrow_base_list_data_type->value_field(); + return garrow_field_new_raw(&arrow_field, nullptr); +} + G_DEFINE_TYPE(GArrowListDataType, garrow_list_data_type, GARROW_TYPE_BASE_LIST_DATA_TYPE) static void @@ -116,16 +138,14 @@ garrow_list_data_type_get_value_field(GArrowListDataType *list_data_type) * Returns: (transfer full): The field of value. * * Since: 0.13.0 + * + * Deprecated: 21.0.0: + * Use garrow_base_list_data_type_get_field() instead. 
*/ GArrowField * garrow_list_data_type_get_field(GArrowListDataType *list_data_type) { - auto data_type = GARROW_DATA_TYPE(list_data_type); - auto arrow_data_type = garrow_data_type_get_raw(data_type); - auto arrow_list_data_type = static_cast(arrow_data_type.get()); - - auto arrow_field = arrow_list_data_type->value_field(); - return garrow_field_new_raw(&arrow_field, nullptr); + return garrow_base_list_data_type_get_field(GARROW_BASE_LIST_DATA_TYPE(list_data_type)); } G_DEFINE_TYPE(GArrowLargeListDataType, garrow_large_list_data_type, GARROW_TYPE_DATA_TYPE) @@ -767,4 +787,93 @@ garrow_run_end_encoded_data_type_get_value_data_type( return garrow_data_type_new_raw(&arrow_value_data_type); } +enum { + PROP_LIST_SIZE = 1 +}; + +G_DEFINE_TYPE(GArrowFixedSizeListDataType, + garrow_fixed_size_list_data_type, + GARROW_TYPE_BASE_LIST_DATA_TYPE) + +static void +garrow_fixed_size_list_data_type_get_property(GObject *object, + guint prop_id, + GValue *value, + GParamSpec *pspec) +{ + auto arrow_data_type = garrow_data_type_get_raw(GARROW_DATA_TYPE(object)); + const auto arrow_fixed_size_list_type = + std::static_pointer_cast(arrow_data_type); + + switch (prop_id) { + case PROP_LIST_SIZE: + g_value_set_int(value, arrow_fixed_size_list_type->list_size()); + break; + default: + G_OBJECT_WARN_INVALID_PROPERTY_ID(object, prop_id, pspec); + break; + } +} + +static void +garrow_fixed_size_list_data_type_class_init(GArrowFixedSizeListDataTypeClass *klass) +{ + GObjectClass *gobject_class; + GParamSpec *spec; + + gobject_class = G_OBJECT_CLASS(klass); + gobject_class->get_property = garrow_fixed_size_list_data_type_get_property; + + spec = g_param_spec_int("list-size", + "List size", + "The list size of the elements", + 0, + G_MAXINT, + 0, + G_PARAM_READABLE); + g_object_class_install_property(gobject_class, PROP_LIST_SIZE, spec); +} + +static void +garrow_fixed_size_list_data_type_init(GArrowFixedSizeListDataType *object) +{ +} + +/** + * garrow_fixed_size_list_data_type_new_data_type: + * @value_type: The data type of an element of each list. + * @list_size: The size of each list. + * + * Returns: A newly created fixed size list data type. + * + * Since: 21.0.0 + */ +GArrowFixedSizeListDataType * +garrow_fixed_size_list_data_type_new_data_type(GArrowDataType *value_type, + gint32 list_size) +{ + auto arrow_value_type = garrow_data_type_get_raw(value_type); + auto arrow_fixed_size_list_data_type = + arrow::fixed_size_list(arrow_value_type, list_size); + return GARROW_FIXED_SIZE_LIST_DATA_TYPE( + garrow_data_type_new_raw(&arrow_fixed_size_list_data_type)); +} + +/** + * garrow_fixed_size_list_data_type_new_field: + * @field: The field of lists. + * @list_size: The size of value. + * + * Returns: A newly created fixed size list data type. 
+ * + * Since: 21.0.0 + */ +GArrowFixedSizeListDataType * +garrow_fixed_size_list_data_type_new_field(GArrowField *field, gint32 list_size) +{ + auto arrow_field = garrow_field_get_raw(field); + auto arrow_fixed_size_list_data_type = arrow::fixed_size_list(arrow_field, list_size); + return GARROW_FIXED_SIZE_LIST_DATA_TYPE( + garrow_data_type_new_raw(&arrow_fixed_size_list_data_type)); +} G_END_DECLS diff --git a/c_glib/arrow-glib/composite-data-type.h b/c_glib/arrow-glib/composite-data-type.h index de9449c41cf..207647bd46a 100644 --- a/c_glib/arrow-glib/composite-data-type.h +++ b/c_glib/arrow-glib/composite-data-type.h @@ -38,6 +38,10 @@ struct _GArrowBaseListDataTypeClass GArrowDataTypeClass parent_class; }; +GARROW_AVAILABLE_IN_21_0 +GArrowField * +garrow_base_list_data_type_get_field(GArrowBaseListDataType *base_list_data_type); + #define GARROW_TYPE_LIST_DATA_TYPE (garrow_list_data_type_get_type()) GARROW_AVAILABLE_IN_ALL G_DECLARE_DERIVABLE_TYPE(GArrowListDataType, @@ -256,4 +260,25 @@ GArrowDataType * garrow_run_end_encoded_data_type_get_value_data_type( GArrowRunEndEncodedDataType *data_type); +#define GARROW_TYPE_FIXED_SIZE_LIST_DATA_TYPE \ + (garrow_fixed_size_list_data_type_get_type()) +GARROW_AVAILABLE_IN_21_0 +G_DECLARE_DERIVABLE_TYPE(GArrowFixedSizeListDataType, + garrow_fixed_size_list_data_type, + GARROW, + FIXED_SIZE_LIST_DATA_TYPE, + GArrowBaseListDataType) +struct _GArrowFixedSizeListDataTypeClass +{ + GArrowBaseListDataTypeClass parent_class; +}; + +GARROW_AVAILABLE_IN_21_0 +GArrowFixedSizeListDataType * +garrow_fixed_size_list_data_type_new_data_type(GArrowDataType *value_type, + gint32 list_size); + +GARROW_AVAILABLE_IN_21_0 +GArrowFixedSizeListDataType * +garrow_fixed_size_list_data_type_new_field(GArrowField *field, gint32 list_size); G_END_DECLS diff --git a/c_glib/test/test-fixed-size-list-data-type.rb b/c_glib/test/test-fixed-size-list-data-type.rb new file mode 100644 index 00000000000..8266fbccf36 --- /dev/null +++ b/c_glib/test/test-fixed-size-list-data-type.rb @@ -0,0 +1,61 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. 
+ +class TestFixedSizeListDataType < Test::Unit::TestCase + sub_test_case(".new") do + def test_field + list_size = 5 + field_name = "bool_field" + field = Arrow::Field.new("bool_field", Arrow::BooleanDataType.new) + data_type = Arrow::FixedSizeListDataType.new(field, list_size) + assert_equal([field, list_size], [data_type.field, data_type.list_size]) + end + + def test_data_type + value_type = Arrow::BooleanDataType.new + list_size = 5 + data_type = Arrow::FixedSizeListDataType.new(value_type, list_size) + field = Arrow::Field.new("item", value_type) + assert_equal([field, list_size], [data_type.field, data_type.list_size]) + end + end + + sub_test_case("instance_methods") do + def setup + @list_size = 5 + @value_type = Arrow::BooleanDataType.new + @data_type = Arrow::FixedSizeListDataType.new(@value_type, @list_size) + end + + def test_name + assert_equal("fixed_size_list", @data_type.name); + end + + def test_to_s + assert_equal("fixed_size_list[5]", @data_type.to_s) + end + + def test_list_size + assert_equal(@list_size, @data_type.list_size) + end + + def test_field + field = Arrow::Field.new("item", @value_type) + assert_equal(field, @data_type.field) + end + end +end From a63ea3a10063ebb65a5241203802fc63b7ae90c8 Mon Sep 17 00:00:00 2001 From: Sutou Kouhei Date: Tue, 17 Jun 2025 11:04:48 +0900 Subject: [PATCH 61/63] GH-45195: [C++] Update bundled AWS SDK for C++ to 1.11.587 (#45306) ### Rationale for this change The current bundled version 1.10.55 was released on 2023-01-20: https://github.com/aws/aws-sdk-cpp/releases/tag/1.10.55 ### What changes are included in this PR? * Use the latest AWS SDK for C++ * Migrate to FetchContent from ExternalProject ### Are these changes tested? Yes. ### Are there any user-facing changes? Yes. * GitHub Issue: #45195 Authored-by: Sutou Kouhei Signed-off-by: Sutou Kouhei --- ci/rtools/README.md | 35 - ci/rtools/aws_c_common_ep.patch | 39 -- ci/rtools/aws_c_io_ep.patch | 56 -- ci/rtools/awssdk_ep.patch | 181 ----- cpp/CMakeLists.txt | 11 +- cpp/cmake_modules/AWSSDKVariables.cmake | 388 ----------- cpp/cmake_modules/BuildUtils.cmake | 12 +- cpp/cmake_modules/SetupCxxFlags.cmake | 8 +- cpp/cmake_modules/ThirdpartyToolchain.cmake | 645 ++++++------------ cpp/cmake_modules/aws-c-common-1208.patch | 83 +++ .../aws_sdk_cpp_generate_variables.sh | 72 -- cpp/thirdparty/README.md | 16 + cpp/thirdparty/update.rb | 189 +++++ cpp/thirdparty/versions.txt | 60 +- python/pyarrow/tests/test_fs.py | 2 + 15 files changed, 547 insertions(+), 1250 deletions(-) delete mode 100644 ci/rtools/README.md delete mode 100644 ci/rtools/aws_c_common_ep.patch delete mode 100644 ci/rtools/aws_c_io_ep.patch delete mode 100644 ci/rtools/awssdk_ep.patch delete mode 100644 cpp/cmake_modules/AWSSDKVariables.cmake create mode 100644 cpp/cmake_modules/aws-c-common-1208.patch delete mode 100755 cpp/cmake_modules/aws_sdk_cpp_generate_variables.sh create mode 100755 cpp/thirdparty/update.rb diff --git a/ci/rtools/README.md b/ci/rtools/README.md deleted file mode 100644 index 08b5ea7f513..00000000000 --- a/ci/rtools/README.md +++ /dev/null @@ -1,35 +0,0 @@ - - -# rtools40 patches for AWS SDK and related libs - -The patches in this directory are solely for the purpose of building Arrow C++ -under [Rtools40](https://cran.r-project.org/bin/windows/Rtools/rtools40.html) -and not used elsewhere. Once we've dropped support for Rtools40, we can consider -removing these patches. 
- -The larger reason these patches are needed is that Rtools provides their own -packages and their versions of the AWS libraries weren't compatible with CMake -3.25. Our solution was to bundle the AWS libs instead and these patches were -required to get them building under the Rtools40 environment. - -The patches were added while upgrading the minimum required CMake version to -3.25 in [GH-44950](https://github.com/apache/arrow/issues/44950). Please see the -associated PR, [GH-44989](https://github.com/apache/arrow/pull/44989), for more -context. diff --git a/ci/rtools/aws_c_common_ep.patch b/ci/rtools/aws_c_common_ep.patch deleted file mode 100644 index 94c84d0fe1b..00000000000 --- a/ci/rtools/aws_c_common_ep.patch +++ /dev/null @@ -1,39 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -diff --git a/include/aws/common/byte_order.inl b/include/aws/common/byte_order.inl -index 1204be0..0abd9cb 100644 ---- a/include/aws/common/byte_order.inl -+++ b/include/aws/common/byte_order.inl -@@ -13,7 +13,7 @@ - # include - #else - # include --#endif /* _MSC_VER */ -+#endif /* _WIN32 */ - - AWS_EXTERN_C_BEGIN - -@@ -39,7 +39,7 @@ AWS_STATIC_IMPL uint64_t aws_hton64(uint64_t x) { - uint64_t v; - __asm__("bswap %q0" : "=r"(v) : "0"(x)); - return v; --#elif defined(_MSC_VER) -+#elif defined(_WIN32) - return _byteswap_uint64(x); - #else - uint32_t low = x & UINT32_MAX; diff --git a/ci/rtools/aws_c_io_ep.patch b/ci/rtools/aws_c_io_ep.patch deleted file mode 100644 index a15d706ba12..00000000000 --- a/ci/rtools/aws_c_io_ep.patch +++ /dev/null @@ -1,56 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -diff --git a/source/windows/secure_channel_tls_handler.c b/source/windows/secure_channel_tls_handler.c -index 50caf02..29fe850 100644 ---- a/source/windows/secure_channel_tls_handler.c -+++ b/source/windows/secure_channel_tls_handler.c -@@ -35,6 +36,25 @@ - # pragma warning(disable : 4306) /* Identifier is type cast to a larger pointer. 
*/ - #endif - -+#ifndef SP_PROT_TLS1_0_SERVER -+#define SP_PROT_TLS1_0_SERVER SP_PROT_TLS1_SERVER -+#endif -+#ifndef SP_PROT_TLS1_0_CLIENT -+#define SP_PROT_TLS1_0_CLIENT SP_PROT_TLS1_CLIENT -+#endif -+#ifndef SP_PROT_TLS1_1_SERVER -+#define SP_PROT_TLS1_1_SERVER 0x00000100 -+#endif -+#ifndef SP_PROT_TLS1_1_CLIENT -+#define SP_PROT_TLS1_1_CLIENT 0x00000200 -+#endif -+#ifndef SCH_USE_STRONG_CRYPTO -+#define SCH_USE_STRONG_CRYPTO 0x00400000 -+#endif -+#ifndef SECBUFFER_ALERT -+#define SECBUFFER_ALERT 0x11 -+#endif -+ - #define KB_1 1024 - #define READ_OUT_SIZE (16 * KB_1) - #define READ_IN_SIZE READ_OUT_SIZE -@@ -456,7 +476,7 @@ static int s_fillin_alpn_data( - - *extension_length += sizeof(uint32_t) + sizeof(uint16_t); - -- *extension_name = SecApplicationProtocolNegotiationExt_ALPN; -+ *extension_name = 2; - /*now add the protocols*/ - for (size_t i = 0; i < protocols_count; ++i) { - struct aws_byte_cursor *protocol_ptr = NULL; diff --git a/ci/rtools/awssdk_ep.patch b/ci/rtools/awssdk_ep.patch deleted file mode 100644 index bd26f853290..00000000000 --- a/ci/rtools/awssdk_ep.patch +++ /dev/null @@ -1,181 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. 
- -diff --git a/aws-cpp-sdk-core/include/aws/core/utils/Array.h b/aws-cpp-sdk-core/include/aws/core/utils/Array.h -index 2b5bbc566..7cb93bdf0 100644 ---- a/aws-cpp-sdk-core/include/aws/core/utils/Array.h -+++ b/aws-cpp-sdk-core/include/aws/core/utils/Array.h -@@ -54,7 +54,7 @@ namespace Aws - { - m_data.reset(Aws::NewArray(m_size, ARRAY_ALLOCATION_TAG)); - --#ifdef _WIN32 -+#ifdef _MSC_VER - std::copy(arrayToCopy, arrayToCopy + arraySize, stdext::checked_array_iterator< T * >(m_data.get(), m_size)); - #else - std::copy(arrayToCopy, arrayToCopy + arraySize, m_data.get()); -@@ -82,7 +82,7 @@ namespace Aws - if(arr->m_size > 0 && arr->m_data) - { - size_t arraySize = arr->m_size; --#ifdef _WIN32 -+#ifdef _MSC_VER - std::copy(arr->m_data.get(), arr->m_data.get() + arraySize, stdext::checked_array_iterator< T * >(m_data.get() + location, m_size)); - #else - std::copy(arr->m_data.get(), arr->m_data.get() + arraySize, m_data.get() + location); -@@ -101,7 +101,7 @@ namespace Aws - { - m_data.reset(Aws::NewArray(m_size, ARRAY_ALLOCATION_TAG)); - --#ifdef _WIN32 -+#ifdef _MSC_VER - std::copy(other.m_data.get(), other.m_data.get() + other.m_size, stdext::checked_array_iterator< T * >(m_data.get(), m_size)); - #else - std::copy(other.m_data.get(), other.m_data.get() + other.m_size, m_data.get()); -@@ -134,7 +134,7 @@ namespace Aws - { - m_data.reset(Aws::NewArray(m_size, ARRAY_ALLOCATION_TAG)); - --#ifdef _WIN32 -+#ifdef _MSC_VER - std::copy(other.m_data.get(), other.m_data.get() + other.m_size, stdext::checked_array_iterator< T * >(m_data.get(), m_size)); - #else - std::copy(other.m_data.get(), other.m_data.get() + other.m_size, m_data.get()); -diff --git a/aws-cpp-sdk-core/source/http/windows/WinHttpSyncHttpClient.cpp b/aws-cpp-sdk-core/source/http/windows/WinHttpSyncHttpClient.cpp -index 4dade6489..a0456cf8e 100644 ---- a/aws-cpp-sdk-core/source/http/windows/WinHttpSyncHttpClient.cpp -+++ b/aws-cpp-sdk-core/source/http/windows/WinHttpSyncHttpClient.cpp -@@ -22,6 +22,16 @@ - #include - #include - -+#ifndef WINHTTP_OPTION_WEB_SOCKET_KEEPALIVE_INTERVAL -+#define WINHTTP_OPTION_WEB_SOCKET_KEEPALIVE_INTERVAL 116 -+#endif -+#ifndef WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_1 -+#define WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_1 0x00000200 -+#endif -+#ifndef WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2 -+#define WINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2 0x00000800 -+#endif -+ - using namespace Aws::Client; - using namespace Aws::Http; - using namespace Aws::Http::Standard; -@@ -272,7 +282,7 @@ bool WinHttpSyncHttpClient::DoQueryHeaders(void* hHttpRequest, std::shared_ptr(dwSize / sizeof(wchar_t))); - - WinHttpQueryHeaders(hHttpRequest, WINHTTP_QUERY_CONTENT_TYPE, nullptr, &contentTypeStr, &dwSize, 0); -- if (contentTypeStr[0] != NULL) -+ if (contentTypeStr[0]) - { - Aws::String contentStr = StringUtils::FromWString(contentTypeStr); - response->SetContentType(contentStr); -@@ -303,7 +313,7 @@ bool WinHttpSyncHttpClient::DoQueryHeaders(void* hHttpRequest, std::shared_ptrSetContentType(contentTypeStr); - AWS_LOGSTREAM_DEBUG(GetLogTag(), "Received content type " << contentTypeStr); -diff --git a/aws-cpp-sdk-core/source/http/windows/WinSyncHttpClient.cpp b/aws-cpp-sdk-core/source/http/windows/WinSyncHttpClient.cpp -index d7513cc3c..e390a8d4e 100644 ---- a/aws-cpp-sdk-core/source/http/windows/WinSyncHttpClient.cpp -+++ b/aws-cpp-sdk-core/source/http/windows/WinSyncHttpClient.cpp -@@ -349,7 +349,7 @@ std::shared_ptr WinSyncHttpClient::MakeRequest(const std::shared_p - } - } - -- if (!success && !IsRequestProcessingEnabled() || 
!ContinueRequest(*request)) -+ if ((!success && !IsRequestProcessingEnabled()) || !ContinueRequest(*request)) - { - response->SetClientErrorType(CoreErrors::USER_CANCELLED); - response->SetClientErrorMessage("Request processing disabled or continuation cancelled by user's continuation handler."); -diff --git a/aws-cpp-sdk-core/source/platform/windows/FileSystem.cpp b/aws-cpp-sdk-core/source/platform/windows/FileSystem.cpp -index 2ea82de6f..bc423441e 100644 ---- a/aws-cpp-sdk-core/source/platform/windows/FileSystem.cpp -+++ b/aws-cpp-sdk-core/source/platform/windows/FileSystem.cpp -@@ -11,7 +11,9 @@ - #include - #include - -+#ifdef _MSC_VER - #pragma warning( disable : 4996) -+#endif - - using namespace Aws::Utils; - namespace Aws -@@ -304,6 +306,9 @@ Aws::String CreateTempFilePath() - { - #ifdef _MSC_VER - #pragma warning(disable: 4996) // _CRT_SECURE_NO_WARNINGS -+#elif !defined(L_tmpnam_s) -+ // Definition from the MSVC stdio.h -+ #define L_tmpnam_s (sizeof("\\") + 16) - #endif - char s_tempName[L_tmpnam_s+1]; - -diff --git a/aws-cpp-sdk-core/source/platform/windows/OSVersionInfo.cpp b/aws-cpp-sdk-core/source/platform/windows/OSVersionInfo.cpp -index 0180f7fbf..3adbab313 100644 ---- a/aws-cpp-sdk-core/source/platform/windows/OSVersionInfo.cpp -+++ b/aws-cpp-sdk-core/source/platform/windows/OSVersionInfo.cpp -@@ -9,7 +9,9 @@ - - #include - -+#ifdef _MSC_VER - #pragma warning(disable: 4996) -+#endif - #include - #include - namespace Aws -diff --git a/aws-cpp-sdk-core/source/utils/crypto/factory/Factories.cpp b/aws-cpp-sdk-core/source/utils/crypto/factory/Factories.cpp -index 2ee517b48..3b0dce665 100644 ---- a/aws-cpp-sdk-core/source/utils/crypto/factory/Factories.cpp -+++ b/aws-cpp-sdk-core/source/utils/crypto/factory/Factories.cpp -@@ -939,7 +939,7 @@ std::shared_ptr Aws::Utils::Crypto::CreateSha256HMACIm - return GetSha256HMACFactory()->CreateImplementation(); - } - --#ifdef _WIN32 -+#ifdef _MSC_VER - #pragma warning( push ) - #pragma warning( disable : 4702 ) - #endif -@@ -1032,7 +1032,7 @@ std::shared_ptr Aws::Utils::Crypto::CreateAES_KeyWrapImplementa - return GetAES_KeyWrapFactory()->CreateImplementation(key); - } - --#ifdef _WIN32 -+#ifdef _MSC_VER - #pragma warning(pop) - #endif diff --git a/cpp/CMakeLists.txt b/cpp/CMakeLists.txt index 60a77c815e3..18ff93e965f 100644 --- a/cpp/CMakeLists.txt +++ b/cpp/CMakeLists.txt @@ -56,6 +56,11 @@ cmake_policy(SET CMP0068 NEW) # find_package() uses _ROOT variables. cmake_policy(SET CMP0074 NEW) +# https://cmake.org/cmake/help/latest/policy/CMP0077.html +# +# option() honors normal variables. +cmake_policy(SET CMP0077 NEW) + # https://cmake.org/cmake/help/latest/policy/CMP0091.html # # MSVC runtime library flags are selected by an abstraction. @@ -387,14 +392,10 @@ endif() # where to put generated archives (.a files) set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}") -set(ARCHIVE_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}") - # where to put generated libraries (.so files) set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}") -set(LIBRARY_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}") - # where to put generated binaries -set(EXECUTABLE_OUTPUT_PATH "${BUILD_OUTPUT_ROOT_DIRECTORY}") +set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "${BUILD_OUTPUT_ROOT_DIRECTORY}") if(CMAKE_GENERATOR STREQUAL Xcode) # Xcode projects support multi-configuration builds. 
This forces a single output directory diff --git a/cpp/cmake_modules/AWSSDKVariables.cmake b/cpp/cmake_modules/AWSSDKVariables.cmake deleted file mode 100644 index 729790dd0f8..00000000000 --- a/cpp/cmake_modules/AWSSDKVariables.cmake +++ /dev/null @@ -1,388 +0,0 @@ -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -# Generated by: -# $ cpp/cmake_modules/aws_sdk_cpp_generate_variables.sh 1.10.55 - -set(AWSSDK_UNUSED_DIRECTORIES - .github - AndroidSDKTesting - CI - Docs - android-build - android-unified-tests - aws-cpp-sdk-AWSMigrationHub - aws-cpp-sdk-access-management - aws-cpp-sdk-accessanalyzer - aws-cpp-sdk-account - aws-cpp-sdk-acm - aws-cpp-sdk-acm-pca - aws-cpp-sdk-alexaforbusiness - aws-cpp-sdk-amp - aws-cpp-sdk-amplify - aws-cpp-sdk-amplifybackend - aws-cpp-sdk-amplifyuibuilder - aws-cpp-sdk-apigateway - aws-cpp-sdk-apigatewaymanagementapi - aws-cpp-sdk-apigatewayv2 - aws-cpp-sdk-appconfig - aws-cpp-sdk-appconfigdata - aws-cpp-sdk-appflow - aws-cpp-sdk-appintegrations - aws-cpp-sdk-application-autoscaling - aws-cpp-sdk-application-insights - aws-cpp-sdk-applicationcostprofiler - aws-cpp-sdk-appmesh - aws-cpp-sdk-apprunner - aws-cpp-sdk-appstream - aws-cpp-sdk-appsync - aws-cpp-sdk-arc-zonal-shift - aws-cpp-sdk-athena - aws-cpp-sdk-auditmanager - aws-cpp-sdk-autoscaling - aws-cpp-sdk-autoscaling-plans - aws-cpp-sdk-awstransfer - aws-cpp-sdk-backup - aws-cpp-sdk-backup-gateway - aws-cpp-sdk-backupstorage - aws-cpp-sdk-batch - aws-cpp-sdk-billingconductor - aws-cpp-sdk-braket - aws-cpp-sdk-budgets - aws-cpp-sdk-ce - aws-cpp-sdk-chime - aws-cpp-sdk-chime-sdk-identity - aws-cpp-sdk-chime-sdk-media-pipelines - aws-cpp-sdk-chime-sdk-meetings - aws-cpp-sdk-chime-sdk-messaging - aws-cpp-sdk-chime-sdk-voice - aws-cpp-sdk-cleanrooms - aws-cpp-sdk-cloud9 - aws-cpp-sdk-cloudcontrol - aws-cpp-sdk-clouddirectory - aws-cpp-sdk-cloudformation - aws-cpp-sdk-cloudfront - aws-cpp-sdk-cloudfront-integration-tests - aws-cpp-sdk-cloudhsm - aws-cpp-sdk-cloudhsmv2 - aws-cpp-sdk-cloudsearch - aws-cpp-sdk-cloudsearchdomain - aws-cpp-sdk-cloudtrail - aws-cpp-sdk-codeartifact - aws-cpp-sdk-codebuild - aws-cpp-sdk-codecatalyst - aws-cpp-sdk-codecommit - aws-cpp-sdk-codedeploy - aws-cpp-sdk-codeguru-reviewer - aws-cpp-sdk-codeguruprofiler - aws-cpp-sdk-codepipeline - aws-cpp-sdk-codestar - aws-cpp-sdk-codestar-connections - aws-cpp-sdk-codestar-notifications - aws-cpp-sdk-cognito-idp - aws-cpp-sdk-cognito-sync - aws-cpp-sdk-cognitoidentity-integration-tests - aws-cpp-sdk-comprehend - aws-cpp-sdk-comprehendmedical - aws-cpp-sdk-compute-optimizer - aws-cpp-sdk-connect - aws-cpp-sdk-connect-contact-lens - aws-cpp-sdk-connectcampaigns - aws-cpp-sdk-connectcases - aws-cpp-sdk-connectparticipant - aws-cpp-sdk-controltower - aws-cpp-sdk-cur - 
aws-cpp-sdk-custom-service-integration-tests - aws-cpp-sdk-customer-profiles - aws-cpp-sdk-databrew - aws-cpp-sdk-dataexchange - aws-cpp-sdk-datapipeline - aws-cpp-sdk-datasync - aws-cpp-sdk-dax - aws-cpp-sdk-detective - aws-cpp-sdk-devicefarm - aws-cpp-sdk-devops-guru - aws-cpp-sdk-directconnect - aws-cpp-sdk-discovery - aws-cpp-sdk-dlm - aws-cpp-sdk-dms - aws-cpp-sdk-docdb - aws-cpp-sdk-docdb-elastic - aws-cpp-sdk-drs - aws-cpp-sdk-ds - aws-cpp-sdk-dynamodb - aws-cpp-sdk-dynamodb-integration-tests - aws-cpp-sdk-dynamodbstreams - aws-cpp-sdk-ebs - aws-cpp-sdk-ec2 - aws-cpp-sdk-ec2-instance-connect - aws-cpp-sdk-ec2-integration-tests - aws-cpp-sdk-ecr - aws-cpp-sdk-ecr-public - aws-cpp-sdk-ecs - aws-cpp-sdk-eks - aws-cpp-sdk-elastic-inference - aws-cpp-sdk-elasticache - aws-cpp-sdk-elasticbeanstalk - aws-cpp-sdk-elasticfilesystem - aws-cpp-sdk-elasticfilesystem-integration-tests - aws-cpp-sdk-elasticloadbalancing - aws-cpp-sdk-elasticloadbalancingv2 - aws-cpp-sdk-elasticmapreduce - aws-cpp-sdk-elastictranscoder - aws-cpp-sdk-email - aws-cpp-sdk-emr-containers - aws-cpp-sdk-emr-serverless - aws-cpp-sdk-es - aws-cpp-sdk-eventbridge - aws-cpp-sdk-eventbridge-tests - aws-cpp-sdk-events - aws-cpp-sdk-evidently - aws-cpp-sdk-finspace - aws-cpp-sdk-finspace-data - aws-cpp-sdk-firehose - aws-cpp-sdk-fis - aws-cpp-sdk-fms - aws-cpp-sdk-forecast - aws-cpp-sdk-forecastquery - aws-cpp-sdk-frauddetector - aws-cpp-sdk-fsx - aws-cpp-sdk-gamelift - aws-cpp-sdk-gamesparks - aws-cpp-sdk-glacier - aws-cpp-sdk-globalaccelerator - aws-cpp-sdk-glue - aws-cpp-sdk-grafana - aws-cpp-sdk-greengrass - aws-cpp-sdk-greengrassv2 - aws-cpp-sdk-groundstation - aws-cpp-sdk-guardduty - aws-cpp-sdk-health - aws-cpp-sdk-healthlake - aws-cpp-sdk-honeycode - aws-cpp-sdk-iam - aws-cpp-sdk-identitystore - aws-cpp-sdk-imagebuilder - aws-cpp-sdk-importexport - aws-cpp-sdk-inspector - aws-cpp-sdk-inspector2 - aws-cpp-sdk-iot - aws-cpp-sdk-iot-data - aws-cpp-sdk-iot-jobs-data - aws-cpp-sdk-iot-roborunner - aws-cpp-sdk-iot1click-devices - aws-cpp-sdk-iot1click-projects - aws-cpp-sdk-iotanalytics - aws-cpp-sdk-iotdeviceadvisor - aws-cpp-sdk-iotevents - aws-cpp-sdk-iotevents-data - aws-cpp-sdk-iotfleethub - aws-cpp-sdk-iotfleetwise - aws-cpp-sdk-iotsecuretunneling - aws-cpp-sdk-iotsitewise - aws-cpp-sdk-iotthingsgraph - aws-cpp-sdk-iottwinmaker - aws-cpp-sdk-iotwireless - aws-cpp-sdk-ivs - aws-cpp-sdk-ivschat - aws-cpp-sdk-kafka - aws-cpp-sdk-kafkaconnect - aws-cpp-sdk-kendra - aws-cpp-sdk-kendra-ranking - aws-cpp-sdk-keyspaces - aws-cpp-sdk-kinesis - aws-cpp-sdk-kinesis-integration-tests - aws-cpp-sdk-kinesis-video-archived-media - aws-cpp-sdk-kinesis-video-media - aws-cpp-sdk-kinesis-video-signaling - aws-cpp-sdk-kinesis-video-webrtc-storage - aws-cpp-sdk-kinesisanalytics - aws-cpp-sdk-kinesisanalyticsv2 - aws-cpp-sdk-kinesisvideo - aws-cpp-sdk-kms - aws-cpp-sdk-lakeformation - aws-cpp-sdk-lambda - aws-cpp-sdk-lambda-integration-tests - aws-cpp-sdk-lex - aws-cpp-sdk-lex-models - aws-cpp-sdk-lexv2-models - aws-cpp-sdk-lexv2-runtime - aws-cpp-sdk-license-manager - aws-cpp-sdk-license-manager-linux-subscriptions - aws-cpp-sdk-license-manager-user-subscriptions - aws-cpp-sdk-lightsail - aws-cpp-sdk-location - aws-cpp-sdk-logs - aws-cpp-sdk-logs-integration-tests - aws-cpp-sdk-lookoutequipment - aws-cpp-sdk-lookoutmetrics - aws-cpp-sdk-lookoutvision - aws-cpp-sdk-m2 - aws-cpp-sdk-machinelearning - aws-cpp-sdk-macie - aws-cpp-sdk-macie2 - aws-cpp-sdk-managedblockchain - aws-cpp-sdk-marketplace-catalog - aws-cpp-sdk-marketplace-entitlement 
- aws-cpp-sdk-marketplacecommerceanalytics - aws-cpp-sdk-mediaconnect - aws-cpp-sdk-mediaconvert - aws-cpp-sdk-medialive - aws-cpp-sdk-mediapackage - aws-cpp-sdk-mediapackage-vod - aws-cpp-sdk-mediastore - aws-cpp-sdk-mediastore-data - aws-cpp-sdk-mediastore-data-integration-tests - aws-cpp-sdk-mediatailor - aws-cpp-sdk-memorydb - aws-cpp-sdk-meteringmarketplace - aws-cpp-sdk-mgn - aws-cpp-sdk-migration-hub-refactor-spaces - aws-cpp-sdk-migrationhub-config - aws-cpp-sdk-migrationhuborchestrator - aws-cpp-sdk-migrationhubstrategy - aws-cpp-sdk-mobile - aws-cpp-sdk-monitoring - aws-cpp-sdk-mq - aws-cpp-sdk-mturk-requester - aws-cpp-sdk-mwaa - aws-cpp-sdk-neptune - aws-cpp-sdk-network-firewall - aws-cpp-sdk-networkmanager - aws-cpp-sdk-nimble - aws-cpp-sdk-oam - aws-cpp-sdk-omics - aws-cpp-sdk-opensearch - aws-cpp-sdk-opensearchserverless - aws-cpp-sdk-opsworks - aws-cpp-sdk-opsworkscm - aws-cpp-sdk-organizations - aws-cpp-sdk-outposts - aws-cpp-sdk-panorama - aws-cpp-sdk-personalize - aws-cpp-sdk-personalize-events - aws-cpp-sdk-personalize-runtime - aws-cpp-sdk-pi - aws-cpp-sdk-pinpoint - aws-cpp-sdk-pinpoint-email - aws-cpp-sdk-pinpoint-sms-voice-v2 - aws-cpp-sdk-pipes - aws-cpp-sdk-polly - aws-cpp-sdk-polly-sample - aws-cpp-sdk-pricing - aws-cpp-sdk-privatenetworks - aws-cpp-sdk-proton - aws-cpp-sdk-qldb - aws-cpp-sdk-qldb-session - aws-cpp-sdk-queues - aws-cpp-sdk-quicksight - aws-cpp-sdk-ram - aws-cpp-sdk-rbin - aws-cpp-sdk-rds - aws-cpp-sdk-rds-data - aws-cpp-sdk-rds-integration-tests - aws-cpp-sdk-redshift - aws-cpp-sdk-redshift-data - aws-cpp-sdk-redshift-integration-tests - aws-cpp-sdk-redshift-serverless - aws-cpp-sdk-rekognition - aws-cpp-sdk-resiliencehub - aws-cpp-sdk-resource-explorer-2 - aws-cpp-sdk-resource-groups - aws-cpp-sdk-resourcegroupstaggingapi - aws-cpp-sdk-robomaker - aws-cpp-sdk-rolesanywhere - aws-cpp-sdk-route53 - aws-cpp-sdk-route53-recovery-cluster - aws-cpp-sdk-route53-recovery-control-config - aws-cpp-sdk-route53-recovery-readiness - aws-cpp-sdk-route53domains - aws-cpp-sdk-route53resolver - aws-cpp-sdk-rum - aws-cpp-sdk-sagemaker - aws-cpp-sdk-sagemaker-a2i-runtime - aws-cpp-sdk-sagemaker-edge - aws-cpp-sdk-sagemaker-featurestore-runtime - aws-cpp-sdk-sagemaker-geospatial - aws-cpp-sdk-sagemaker-metrics - aws-cpp-sdk-sagemaker-runtime - aws-cpp-sdk-savingsplans - aws-cpp-sdk-scheduler - aws-cpp-sdk-schemas - aws-cpp-sdk-sdb - aws-cpp-sdk-secretsmanager - aws-cpp-sdk-securityhub - aws-cpp-sdk-securitylake - aws-cpp-sdk-serverlessrepo - aws-cpp-sdk-service-quotas - aws-cpp-sdk-servicecatalog - aws-cpp-sdk-servicecatalog-appregistry - aws-cpp-sdk-servicediscovery - aws-cpp-sdk-sesv2 - aws-cpp-sdk-shield - aws-cpp-sdk-signer - aws-cpp-sdk-simspaceweaver - aws-cpp-sdk-sms - aws-cpp-sdk-sms-voice - aws-cpp-sdk-snow-device-management - aws-cpp-sdk-snowball - aws-cpp-sdk-sns - aws-cpp-sdk-sqs - aws-cpp-sdk-sqs-integration-tests - aws-cpp-sdk-ssm - aws-cpp-sdk-ssm-contacts - aws-cpp-sdk-ssm-incidents - aws-cpp-sdk-ssm-sap - aws-cpp-sdk-sso - aws-cpp-sdk-sso-admin - aws-cpp-sdk-sso-oidc - aws-cpp-sdk-states - aws-cpp-sdk-storagegateway - aws-cpp-sdk-support - aws-cpp-sdk-support-app - aws-cpp-sdk-swf - aws-cpp-sdk-synthetics - aws-cpp-sdk-text-to-speech - aws-cpp-sdk-text-to-speech-tests - aws-cpp-sdk-textract - aws-cpp-sdk-timestream-query - aws-cpp-sdk-timestream-write - aws-cpp-sdk-transcribe - aws-cpp-sdk-transcribestreaming - aws-cpp-sdk-transcribestreaming-integration-tests - aws-cpp-sdk-translate - aws-cpp-sdk-voice-id - aws-cpp-sdk-waf - 
aws-cpp-sdk-waf-regional - aws-cpp-sdk-wafv2 - aws-cpp-sdk-wellarchitected - aws-cpp-sdk-wisdom - aws-cpp-sdk-workdocs - aws-cpp-sdk-worklink - aws-cpp-sdk-workmail - aws-cpp-sdk-workmailmessageflow - aws-cpp-sdk-workspaces - aws-cpp-sdk-workspaces-web - aws-cpp-sdk-xray - code-generation - crt - doc_crosslinks - doc_crosslinks_new - doxygen - generated - scripts - testing-resources) diff --git a/cpp/cmake_modules/BuildUtils.cmake b/cpp/cmake_modules/BuildUtils.cmake index 32d962b0cf2..134f47b12ef 100644 --- a/cpp/cmake_modules/BuildUtils.cmake +++ b/cpp/cmake_modules/BuildUtils.cmake @@ -552,7 +552,7 @@ function(ADD_BENCHMARK REL_BENCHMARK_NAME) if(EXISTS ${CMAKE_CURRENT_SOURCE_DIR}/${REL_BENCHMARK_NAME}.cc) # This benchmark has a corresponding .cc file, set it up as an executable. - set(BENCHMARK_PATH "${EXECUTABLE_OUTPUT_PATH}/${BENCHMARK_NAME}") + set(BENCHMARK_PATH "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${BENCHMARK_NAME}") add_executable(${BENCHMARK_NAME} ${SOURCES}) if(ARG_STATIC_LINK_LIBS) @@ -581,7 +581,8 @@ function(ADD_BENCHMARK REL_BENCHMARK_NAME) PROPERTIES BUILD_WITH_INSTALL_RPATH TRUE INSTALL_RPATH_USE_LINK_PATH TRUE INSTALL_RPATH - "$ENV{CONDA_PREFIX}/lib;${EXECUTABLE_OUTPUT_PATH}") + "$ENV{CONDA_PREFIX}/lib;${CMAKE_RUNTIME_OUTPUT_DIRECTORY}" + ) endif() # Add test as dependency of relevant label targets @@ -682,7 +683,7 @@ function(ADD_TEST_CASE REL_TEST_NAME) # Make sure the executable name contains only hyphens, not underscores string(REPLACE "_" "-" TEST_NAME ${TEST_NAME}) - set(TEST_PATH "${EXECUTABLE_OUTPUT_PATH}/${TEST_NAME}") + set(TEST_PATH "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${TEST_NAME}") add_executable(${TEST_NAME} ${SOURCES}) # With OSX and conda, we need to set the correct RPATH so that dependencies @@ -695,7 +696,8 @@ function(ADD_TEST_CASE REL_TEST_NAME) PROPERTIES BUILD_WITH_INSTALL_RPATH TRUE INSTALL_RPATH_USE_LINK_PATH TRUE INSTALL_RPATH - "${EXECUTABLE_OUTPUT_PATH};$ENV{CONDA_PREFIX}/lib") + "${CMAKE_RUNTIME_OUTPUT_DIRECTORY};$ENV{CONDA_PREFIX}/lib" + ) endif() # Ensure using bundled GoogleTest when we use bundled GoogleTest. @@ -826,7 +828,7 @@ function(ADD_ARROW_EXAMPLE REL_EXAMPLE_NAME) if(EXISTS ${CMAKE_SOURCE_DIR}/examples/arrow/${REL_EXAMPLE_NAME}.cc) # This example has a corresponding .cc file, set it up as an executable. - set(EXAMPLE_PATH "${EXECUTABLE_OUTPUT_PATH}/${EXAMPLE_NAME}") + set(EXAMPLE_PATH "${CMAKE_RUNTIME_OUTPUT_DIRECTORY}/${EXAMPLE_NAME}") add_executable(${EXAMPLE_NAME} "${REL_EXAMPLE_NAME}.cc" ${ARG_EXTRA_SOURCES}) target_link_libraries(${EXAMPLE_NAME} ${ARROW_EXAMPLE_LINK_LIBS}) add_dependencies(runexample ${EXAMPLE_NAME}) diff --git a/cpp/cmake_modules/SetupCxxFlags.cmake b/cpp/cmake_modules/SetupCxxFlags.cmake index fdb28b540e2..9ecca91ecd5 100644 --- a/cpp/cmake_modules/SetupCxxFlags.cmake +++ b/cpp/cmake_modules/SetupCxxFlags.cmake @@ -152,8 +152,6 @@ set(CMAKE_CXX_EXTENSIONS OFF) # shared libraries set(CMAKE_POSITION_INDEPENDENT_CODE ${ARROW_POSITION_INDEPENDENT_CODE}) -string(TOUPPER ${CMAKE_BUILD_TYPE} CMAKE_BUILD_TYPE) - set(UNKNOWN_COMPILER_MESSAGE "Unknown compiler: ${CMAKE_CXX_COMPILER_ID} ${CMAKE_CXX_COMPILER_VERSION}") @@ -265,7 +263,7 @@ endif() # `RELEASE`, then it will default to `PRODUCTION`. The goal of defaulting to # `CHECKIN` is to avoid friction with long response time from CI. 
if(NOT BUILD_WARNING_LEVEL) - if("${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE") + if("${UPPERCASE_BUILD_TYPE}" STREQUAL "RELEASE") set(BUILD_WARNING_LEVEL PRODUCTION) else() set(BUILD_WARNING_LEVEL CHECKIN) @@ -602,7 +600,7 @@ if(NOT WIN32 AND NOT APPLE) if(MUST_USE_GOLD) message(STATUS "Using hard-wired gold linker (version ${GOLD_VERSION})") if(ARROW_BUGGY_GOLD) - if("${ARROW_LINK}" STREQUAL "d" AND "${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE") + if("${ARROW_LINK}" STREQUAL "d" AND "${UPPERCASE_BUILD_TYPE}" STREQUAL "RELEASE") message(SEND_ERROR "Configured to use buggy gold with dynamic linking " "in a RELEASE build") endif() @@ -808,7 +806,7 @@ if(CMAKE_SYSTEM_NAME STREQUAL "Emscripten") set(CMAKE_SHARED_LINKER_FLAGS "-sSIDE_MODULE=1 ${ARROW_EMSCRIPTEN_LINKER_FLAGS}") if(ARROW_TESTING) # flags for building test executables for use in node - if("${CMAKE_BUILD_TYPE}" STREQUAL "RELEASE") + if("${UPPERCASE_BUILD_TYPE}" STREQUAL "RELEASE") set(CMAKE_EXE_LINKER_FLAGS "${ARROW_EMSCRIPTEN_LINKER_FLAGS} -sALLOW_MEMORY_GROWTH -lnodefs.js -lnoderawfs.js --pre-js ${BUILD_SUPPORT_DIR}/emscripten-test-init.js" ) diff --git a/cpp/cmake_modules/ThirdpartyToolchain.cmake b/cpp/cmake_modules/ThirdpartyToolchain.cmake index 412207c4bfd..69ca6a69df5 100644 --- a/cpp/cmake_modules/ThirdpartyToolchain.cmake +++ b/cpp/cmake_modules/ThirdpartyToolchain.cmake @@ -1012,7 +1012,7 @@ endif() set(MAKE_BUILD_ARGS "-j${NPROC}") include(FetchContent) -set(FC_DECLARE_COMMON_OPTIONS) +set(FC_DECLARE_COMMON_OPTIONS SYSTEM) if(CMAKE_VERSION VERSION_GREATER_EQUAL 3.28) list(APPEND FC_DECLARE_COMMON_OPTIONS EXCLUDE_FROM_ALL TRUE) endif() @@ -1020,13 +1020,17 @@ endif() macro(prepare_fetchcontent) set(BUILD_SHARED_LIBS OFF) set(BUILD_STATIC_LIBS ON) - set(CMAKE_COMPILE_WARNING_AS_ERROR FALSE) - set(CMAKE_EXPORT_NO_PACKAGE_REGISTRY TRUE) + set(BUILD_TESTING OFF) + set(CMAKE_ARCHIVE_OUTPUT_DIRECTORY "") + set(CMAKE_COMPILE_WARNING_AS_ERROR OFF) + set(CMAKE_EXPORT_NO_PACKAGE_REGISTRY OFF) + set(CMAKE_LIBRARY_OUTPUT_DIRECTORY "") set(CMAKE_MACOSX_RPATH ${ARROW_INSTALL_NAME_RPATH}) # We set CMAKE_POLICY_VERSION_MINIMUM temporarily due to failures with CMake 4 # We should remove it once we have updated the dependencies: # https://github.com/apache/arrow/issues/45985 set(CMAKE_POLICY_VERSION_MINIMUM 3.5) + set(CMAKE_RUNTIME_OUTPUT_DIRECTORY "") if(MSVC) string(REPLACE "/WX" "" CMAKE_C_FLAGS_DEBUG "${CMAKE_C_FLAGS_DEBUG}") @@ -1058,7 +1062,7 @@ macro(build_boost) if(ARROW_BOOST_REQUIRE_LIBRARY) set(BOOST_LIB_DIR "${BOOST_PREFIX}/stage/lib") set(BOOST_BUILD_LINK "static") - if("${CMAKE_BUILD_TYPE}" STREQUAL "DEBUG") + if("${UPPERCASE_BUILD_TYPE}" STREQUAL "DEBUG") set(BOOST_BUILD_VARIANT "debug") else() set(BOOST_BUILD_VARIANT "release") @@ -2657,11 +2661,13 @@ if(ARROW_WITH_ZLIB) resolve_dependency(ZLIB PC_PACKAGE_NAMES zlib) endif() -macro(build_lz4) +function(build_lz4) message(STATUS "Building LZ4 from source using FetchContent") # Set LZ4 as vendored - set(LZ4_VENDORED TRUE) + set(LZ4_VENDORED + TRUE + PARENT_SCOPE) # Declare the content fetchcontent_declare(lz4 @@ -2692,7 +2698,10 @@ macro(build_lz4) # Add to bundled static libs. # We must use lz4_static (not imported target) not LZ4::lz4 (imported target). 
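+  # Note: build_lz4 was converted from a macro to a function, so
+  # variables set here (LZ4_VENDORED, ARROW_BUNDLED_STATIC_LIBS) are
+  # function-local and must be re-exported with PARENT_SCOPE below.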
list(APPEND ARROW_BUNDLED_STATIC_LIBS lz4_static) -endmacro() + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} + PARENT_SCOPE) +endfunction() if(ARROW_WITH_LZ4) resolve_dependency(lz4 @@ -4778,7 +4787,7 @@ function(build_orc) ${ARROW_PROTOBUF_PROTOC} ${ARROW_ZSTD_LIBZSTD} ${Snappy_TARGET} - LZ4::lz4 + ${ORC_LZ4_TARGET} ZLIB::ZLIB) add_library(orc::orc STATIC IMPORTED) set_target_properties(orc::orc PROPERTIES IMPORTED_LOCATION "${ORC_STATIC_LIB}") @@ -5055,439 +5064,217 @@ endif() # ---------------------------------------------------------------------- # AWS SDK for C++ -include(AWSSDKVariables) +function(build_awssdk) + message(STATUS "Building AWS SDK for C++ from source") -macro(build_awssdk) - message(STATUS "Building AWS C++ SDK from source") - set(AWSSDK_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/awssdk_ep-install") - set(AWSSDK_INCLUDE_DIR "${AWSSDK_PREFIX}/include") - - # The AWS SDK has a few warnings around shortening lengths - set(AWS_C_FLAGS "${EP_C_FLAGS}") - set(AWS_CXX_FLAGS "${EP_CXX_FLAGS}") - if(CMAKE_CXX_COMPILER_ID STREQUAL "AppleClang" OR CMAKE_CXX_COMPILER_ID STREQUAL - "Clang") - # Negate warnings that AWS SDK cannot build under - string(APPEND AWS_C_FLAGS " -Wno-error=shorten-64-to-32") - string(APPEND AWS_CXX_FLAGS " -Wno-error=shorten-64-to-32") - endif() - if(NOT MSVC) - string(APPEND AWS_C_FLAGS " -Wno-deprecated") - string(APPEND AWS_CXX_FLAGS " -Wno-deprecated") + # aws-c-common must be the first product because others depend on + # this. + set(AWSSDK_PRODUCTS aws-c-common) + if(LINUX) + list(APPEND AWSSDK_PRODUCTS aws-lc s2n-tls) endif() - # GH-44950: This is required to build under Rtools40 and we may be able to - # remove it if/when we no longer need to build under Rtools40 - if(WIN32 AND NOT MSVC) - string(APPEND - AWS_C_FLAGS - " -D_WIN32_WINNT=0x0601 -D__USE_MINGW_ANSI_STDIO=1 -Wno-error -Wno-error=format= -Wno-error=format-extra-args -Wno-unused-local-typedefs -Wno-unused-variable" - ) - string(APPEND - AWS_CXX_FLAGS - " -D_WIN32_WINNT=0x0601 -D__USE_MINGW_ANSI_STDIO=1 -Wno-error -Wno-error=format= -Wno-error=format-extra-args -Wno-unused-local-typedefs -Wno-unused-variable" + list(APPEND + AWSSDK_PRODUCTS + # We can't sort this in alphabetical order because some + # products depend on other products. 
+ aws-checksums + aws-c-cal + aws-c-io + aws-c-event-stream + aws-c-sdkutils + aws-c-compression + aws-c-http + aws-c-mqtt + aws-c-auth + aws-c-s3 + aws-crt-cpp + aws-sdk-cpp) + set(AWS_SDK_CPP_SOURCE_URL "${AWSSDK_SOURCE_URL}") + set(ARROW_AWS_SDK_CPP_BUILD_SHA256_CHECKSUM "${ARROW_AWSSDK_BUILD_SHA256_CHECKSUM}") + foreach(AWSSDK_PRODUCT ${AWSSDK_PRODUCTS}) + # aws-c-cal -> + # AWS-C-CAL + string(TOUPPER "${AWSSDK_PRODUCT}" BASE_VARIABLE_NAME) + # AWS-C-CAL -> + # AWS_C_CAL + string(REGEX REPLACE "-" "_" BASE_VARIABLE_NAME "${BASE_VARIABLE_NAME}") + if(MINGW AND AWSSDK_PRODUCT STREQUAL "aws-c-common") + find_program(PATCH patch REQUIRED) + set(${BASE_VARIABLE_NAME}_PATCH_COMMAND + ${PATCH} -p1 -i ${CMAKE_CURRENT_LIST_DIR}/aws-c-common-1208.patch) + endif() + fetchcontent_declare(${AWSSDK_PRODUCT} + ${FC_DECLARE_COMMON_OPTIONS} OVERRIDE_FIND_PACKAGE + PATCH_COMMAND ${${BASE_VARIABLE_NAME}_PATCH_COMMAND} + URL ${${BASE_VARIABLE_NAME}_SOURCE_URL} + URL_HASH "SHA256=${ARROW_${BASE_VARIABLE_NAME}_BUILD_SHA256_CHECKSUM}" ) - endif() + endforeach() - set(AWSSDK_COMMON_CMAKE_ARGS - ${EP_COMMON_CMAKE_ARGS} - -DCMAKE_C_FLAGS=${AWS_C_FLAGS} - -DCMAKE_CXX_FLAGS=${AWS_CXX_FLAGS} - -DCPP_STANDARD=${CMAKE_CXX_STANDARD} - -DCMAKE_INSTALL_PREFIX=${AWSSDK_PREFIX} - -DCMAKE_PREFIX_PATH=${AWSSDK_PREFIX} - -DENABLE_TESTING=OFF - -DENABLE_UNITY_BUILD=ON - -DOPENSSL_CRYPTO_LIBRARY=${OPENSSL_CRYPTO_LIBRARY} - -DOPENSSL_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR} - -DOPENSSL_SSL_LIBRARY=${OPENSSL_SSL_LIBRARY} - -Dcrypto_INCLUDE_DIR=${OPENSSL_INCLUDE_DIR} - -Dcrypto_LIBRARY=${OPENSSL_CRYPTO_LIBRARY}) - if(ARROW_OPENSSL_USE_SHARED) - list(APPEND AWSSDK_COMMON_CMAKE_ARGS - -Dcrypto_SHARED_LIBRARY=${OPENSSL_CRYPTO_LIBRARY}) - else() - list(APPEND AWSSDK_COMMON_CMAKE_ARGS - -Dcrypto_STATIC_LIBRARY=${OPENSSL_CRYPTO_LIBRARY}) - endif() - set(AWSSDK_CMAKE_ARGS - ${AWSSDK_COMMON_CMAKE_ARGS} - -DBUILD_DEPS=OFF - -DBUILD_ONLY=config\\$s3\\$transfer\\$identity-management\\$sts - -DMINIMIZE_SIZE=ON) - # Remove unused directories to save build directory storage. - # 807MB -> 31MB - set(AWSSDK_PATCH_COMMAND ${CMAKE_COMMAND} -E) - if(CMAKE_VERSION VERSION_LESS 3.17) - list(APPEND AWSSDK_PATCH_COMMAND remove_directory) - else() - list(APPEND AWSSDK_PATCH_COMMAND rm -rf) + prepare_fetchcontent() + set(BUILD_DEPS OFF) + set(BUILD_TOOL OFF) + set(ENABLE_TESTING OFF) + set(IN_SOURCE_BUILD ON) + set(MINIMIZE_SIZE ON) + set(USE_OPENSSL ON) + + # For aws-c-common + if(MINGW) + # PPROCESSOR_NUMBER requires Windows 7 or later. 
+ # + # 0x0601 == _WIN32_WINNT_WIN7 + string(APPEND CMAKE_C_FLAGS " -D_WIN32_WINNT=0x0601") + string(APPEND CMAKE_CXX_FLAGS " -D_WIN32_WINNT=0x0601") endif() - list(APPEND AWSSDK_PATCH_COMMAND ${AWSSDK_UNUSED_DIRECTORIES}) - # Patch parts of the AWSSDK EP so it builds cleanly under Rtools40 - if(WIN32 AND NOT MSVC) - find_program(PATCH patch REQUIRED) - # Patch aws_c_common to build under Rtools40 - set(AWS_C_COMMON_PATCH_COMMAND ${PATCH} -p1 -i - ${CMAKE_SOURCE_DIR}/../ci/rtools/aws_c_common_ep.patch) - message(STATUS "Hello ${AWS_C_COMMON_PATCH_COMMAND}") - # aws_c_io_ep to build under Rtools40 - set(AWS_C_IO_PATCH_COMMAND ${PATCH} -p1 -i - ${CMAKE_SOURCE_DIR}/../ci/rtools/aws_c_io_ep.patch) - message(STATUS "Hello ${AWS_C_IO_PATCH_COMMAND}") - # awssdk_ep to build under Rtools40 - list(APPEND - AWSSDK_PATCH_COMMAND - && - ${PATCH} - -p1 - -i - ${CMAKE_SOURCE_DIR}/../ci/rtools/awssdk_ep.patch) - message(STATUS "Hello ${AWSSDK_PATCH_COMMAND}") - endif() + # For aws-lc + set(DISABLE_GO ON) + set(DISABLE_PERL ON) - if(UNIX) - # on Linux and macOS curl seems to be required - find_curl() - get_filename_component(CURL_ROOT_HINT "${CURL_INCLUDE_DIRS}" DIRECTORY) - get_filename_component(ZLIB_ROOT_HINT "${ZLIB_INCLUDE_DIRS}" DIRECTORY) + # For s2n-tls + set(crypto_INCLUDE_DIR "$") + set(crypto_STATIC_LIBRARY "$") + set(S2N_INTERN_LIBCRYPTO ON) - # provide hint for AWS SDK to link with the already located libcurl and zlib - list(APPEND - AWSSDK_CMAKE_ARGS - -DCURL_INCLUDE_DIR=${CURL_ROOT_HINT}/include - -DCURL_LIBRARY=${CURL_ROOT_HINT}/lib - -DZLIB_INCLUDE_DIR=${ZLIB_ROOT_HINT}/include - -DZLIB_LIBRARY=${ZLIB_ROOT_HINT}/lib) - endif() - - file(MAKE_DIRECTORY ${AWSSDK_INCLUDE_DIR}) - - # AWS C++ SDK related libraries to link statically - set(_AWSSDK_LIBS - aws-cpp-sdk-identity-management - aws-cpp-sdk-sts - aws-cpp-sdk-cognito-identity - aws-cpp-sdk-s3 - aws-cpp-sdk-core - aws-crt-cpp - aws-c-s3 - aws-c-auth - aws-c-mqtt - aws-c-http - aws-c-compression - aws-c-sdkutils - aws-c-event-stream - aws-c-io - aws-c-cal - aws-checksums - aws-c-common) - - # aws-lc needs to be installed on a separate folder to hide from unintended use - set(AWS_LC_PREFIX "${CMAKE_CURRENT_BINARY_DIR}/aws_lc_ep-install") - set(AWS_LC_INCLUDE_DIR "${AWS_LC_PREFIX}/include") - - if(UNIX AND NOT APPLE) # aws-lc and s2n-tls only needed on linux - file(MAKE_DIRECTORY ${AWS_LC_INCLUDE_DIR}) - list(APPEND _AWSSDK_LIBS s2n-tls aws-lc) - endif() - - set(AWSSDK_LIBRARIES) - foreach(_AWSSDK_LIB ${_AWSSDK_LIBS}) - # aws-c-common -> AWS-C-COMMON - string(TOUPPER ${_AWSSDK_LIB} _AWSSDK_LIB_UPPER) - # AWS-C-COMMON -> AWS_C_COMMON - string(REPLACE "-" "_" _AWSSDK_LIB_NAME_PREFIX ${_AWSSDK_LIB_UPPER}) - set(_AWSSDK_STATIC_LIBRARY - "${AWSSDK_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}${_AWSSDK_LIB}${CMAKE_STATIC_LIBRARY_SUFFIX}" - ) - if(${_AWSSDK_LIB} STREQUAL "s2n-tls") # Build output of s2n-tls is libs2n.a - set(_AWSSDK_STATIC_LIBRARY - "${AWSSDK_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}s2n${CMAKE_STATIC_LIBRARY_SUFFIX}" - ) - elseif(${_AWSSDK_LIB} STREQUAL "aws-lc") # We only need libcrypto from aws-lc - set(_AWSSDK_STATIC_LIBRARY - "${AWS_LC_PREFIX}/lib/${CMAKE_STATIC_LIBRARY_PREFIX}crypto${CMAKE_STATIC_LIBRARY_SUFFIX}" - ) - endif() - if(${_AWSSDK_LIB} MATCHES "^aws-cpp-sdk-") - set(_AWSSDK_TARGET_NAME ${_AWSSDK_LIB}) - elseif(${_AWSSDK_LIB} STREQUAL "aws-lc") - set(_AWSSDK_TARGET_NAME AWS::crypto) - else() - set(_AWSSDK_TARGET_NAME AWS::${_AWSSDK_LIB}) - endif() - add_library(${_AWSSDK_TARGET_NAME} STATIC IMPORTED) - 
set_target_properties(${_AWSSDK_TARGET_NAME} PROPERTIES IMPORTED_LOCATION - ${_AWSSDK_STATIC_LIBRARY}) - target_include_directories(${_AWSSDK_TARGET_NAME} BEFORE - INTERFACE "${AWSSDK_INCLUDE_DIR}") - if(${_AWSSDK_LIB} STREQUAL "aws-lc") - set_target_properties(${_AWSSDK_TARGET_NAME} PROPERTIES IMPORTED_LOCATION - ${_AWSSDK_STATIC_LIBRARY}) - target_include_directories(${_AWSSDK_TARGET_NAME} BEFORE - INTERFACE "${AWS_LC_INCLUDE_DIR}") - endif() - set("${_AWSSDK_LIB_NAME_PREFIX}_STATIC_LIBRARY" ${_AWSSDK_STATIC_LIBRARY}) + # For aws-lc and s2n-tls + # + # Link time optimization is causing trouble like GH-34349 + string(REPLACE "-flto=auto" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") + string(REPLACE "-ffat-lto-objects" "" CMAKE_C_FLAGS "${CMAKE_C_FLAGS}") - if(NOT ${_AWSSDK_LIB} STREQUAL "aws-lc") - # aws-lc only linked against s2n but not arrow - list(APPEND AWSSDK_LIBRARIES ${_AWSSDK_TARGET_NAME}) - endif() - endforeach() + # For aws-c-io + if(MINGW AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS "9") + # This is for RTools 40. We can remove this after we dropped + # support for R < 4.2. schannel.h in RTools 40 is old. - externalproject_add(aws_c_common_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_COMMON_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_COMMON_BUILD_SHA256_CHECKSUM}" - PATCH_COMMAND ${AWS_C_COMMON_PATCH_COMMAND} - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_COMMON_STATIC_LIBRARY}) - add_dependencies(AWS::aws-c-common aws_c_common_ep) - - set(AWS_CHECKSUMS_CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS}) - if(NOT WIN32) - # On non-Windows, always build in release mode. - # Especially with gcc, debug builds can fail with "asm constraint" errors: - # https://github.com/TileDB-Inc/TileDB/issues/1351 - list(APPEND AWS_CHECKSUMS_CMAKE_ARGS -DCMAKE_BUILD_TYPE=Release) + # For schannel.h + # + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/schannel/ns-schannel-schannel_cred + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_0_SERVER=0x00000040") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_0_CLIENT=0x00000080") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_1_SERVER=0x00000100") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_1_CLIENT=0x00000200") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_2_SERVER=0x00000400") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_2_CLIENT=0x00000800") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_3_SERVER=0x00001000") + string(APPEND CMAKE_C_FLAGS " -DSP_PROT_TLS1_3_CLIENT=0x00002000") + string(APPEND CMAKE_C_FLAGS " -DSCH_USE_STRONG_CRYPTO=0x00400000") + + # For sspi.h + # + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/sspi/ne-sspi-sec_application_protocol_negotiation_ext + string(APPEND CMAKE_C_FLAGS " -DSecApplicationProtocolNegotiationExt_ALPN=2") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/sspi/ns-sspi-secbuffer + string(APPEND CMAKE_C_FLAGS " -DSECBUFFER_ALERT=17") endif() - externalproject_add(aws_checksums_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_CHECKSUMS_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_CHECKSUMS_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWS_CHECKSUMS_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_CHECKSUMS_STATIC_LIBRARY} - DEPENDS aws_c_common_ep) - add_dependencies(AWS::aws-checksums aws_checksums_ep) - - if("s2n-tls" IN_LIST _AWSSDK_LIBS) - # Remove unused directories to save build directory storage. 
- # 169MB -> 105MB - set(AWS_LC_PATCH_COMMAND ${CMAKE_COMMAND} -E) - if(CMAKE_VERSION VERSION_LESS 3.17) - list(APPEND AWS_LC_PATCH_COMMAND remove_directory) - else() - list(APPEND AWS_LC_PATCH_COMMAND rm -rf) - endif() - list(APPEND AWS_LC_PATCH_COMMAND fuzz) - set(AWS_LC_C_FLAGS ${EP_C_FLAGS}) - string(APPEND AWS_LC_C_FLAGS " -Wno-error=overlength-strings -Wno-error=pedantic") - # Link time optimization is causing trouble like #34349 - string(REPLACE "-flto=auto" "" AWS_LC_C_FLAGS "${AWS_LC_C_FLAGS}") - string(REPLACE "-ffat-lto-objects" "" AWS_LC_C_FLAGS "${AWS_LC_C_FLAGS}") - - set(AWS_LC_CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS}) - list(APPEND AWS_LC_CMAKE_ARGS -DCMAKE_INSTALL_PREFIX=${AWS_LC_PREFIX} - -DCMAKE_C_FLAGS=${AWS_LC_C_FLAGS}) - - externalproject_add(aws_lc_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_LC_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_LC_BUILD_SHA256_CHECKSUM}" - PATCH_COMMAND ${AWS_LC_PATCH_COMMAND} - CMAKE_ARGS ${AWS_LC_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_LC_STATIC_LIBRARY}) - add_dependencies(AWS::crypto aws_lc_ep) - - set(S2N_TLS_C_FLAGS ${EP_C_FLAGS}) - # Link time optimization is causing trouble like #34349 - string(REPLACE "-flto=auto" "" S2N_TLS_C_FLAGS "${S2N_TLS_C_FLAGS}") - string(REPLACE "-ffat-lto-objects" "" S2N_TLS_C_FLAGS "${S2N_TLS_C_FLAGS}") - - set(S2N_TLS_CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS}) - list(APPEND - S2N_TLS_CMAKE_ARGS - # internalize libcrypto to avoid name conflict with OpenSSL - -DS2N_INTERN_LIBCRYPTO=ON - # path to find crypto provided by aws-lc - -DCMAKE_PREFIX_PATH=${AWS_LC_PREFIX} - -DCMAKE_C_FLAGS=${S2N_TLS_C_FLAGS} - # paths to find crypto provided by aws-lc - -Dcrypto_INCLUDE_DIR=${AWS_LC_PREFIX}/include - -Dcrypto_LIBRARY=${AWS_LC_STATIC_LIBRARY} - -Dcrypto_STATIC_LIBRARY=${AWS_LC_STATIC_LIBRARY}) - - externalproject_add(s2n_tls_ep - ${EP_COMMON_OPTIONS} - URL ${S2N_TLS_SOURCE_URL} - URL_HASH "SHA256=${ARROW_S2N_TLS_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${S2N_TLS_CMAKE_ARGS} - BUILD_BYPRODUCTS ${S2N_TLS_STATIC_LIBRARY} - DEPENDS aws_lc_ep) - add_dependencies(AWS::s2n-tls s2n_tls_ep) + # For aws-sdk-cpp + # + # We need to use CACHE variables because aws-sdk-cpp < 12.0.0 uses + # CMP0077 OLD policy. We can use normal variables when we use + # aws-sdk-cpp >= 12.0.0. + set(AWS_SDK_WARNINGS_ARE_ERRORS + OFF + CACHE BOOL "" FORCE) + set(BUILD_DEPS + OFF + CACHE BOOL "" FORCE) + set(BUILD_ONLY + "" + CACHE STRING "" FORCE) + list(APPEND + BUILD_ONLY + config + core + identity-management + s3 + sts + transfer) + set(BUILD_SHARED_LIBS + OFF + CACHE BOOL "" FORCE) + set(ENABLE_TESTING + OFF + CACHE BOOL "" FORCE) + if(NOT WIN32) + set(ZLIB_INCLUDE_DIR + "$" + CACHE STRING "" FORCE) + set(ZLIB_LIBRARY + "$" + CACHE STRING "" FORCE) endif() + if(MINGW AND CMAKE_CXX_COMPILER_VERSION VERSION_LESS "9") + # This is for RTools 40. We can remove this after we dropped + # support for R < 4.2. schannel.h in RTools 40 is old. 
- externalproject_add(aws_c_cal_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_CAL_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_CAL_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_CAL_STATIC_LIBRARY} - DEPENDS aws_c_common_ep) - add_dependencies(AWS::aws-c-cal aws_c_cal_ep) - - set(AWS_C_IO_DEPENDS aws_c_common_ep aws_c_cal_ep) - if(TARGET s2n_tls_ep) - list(APPEND AWS_C_IO_DEPENDS s2n_tls_ep) - endif() - externalproject_add(aws_c_io_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_IO_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_IO_BUILD_SHA256_CHECKSUM}" - PATCH_COMMAND ${AWS_C_IO_PATCH_COMMAND} - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_IO_STATIC_LIBRARY} - DEPENDS ${AWS_C_IO_DEPENDS}) - add_dependencies(AWS::aws-c-io aws_c_io_ep) - - externalproject_add(aws_c_event_stream_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_EVENT_STREAM_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_EVENT_STREAM_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_EVENT_STREAM_STATIC_LIBRARY} - DEPENDS aws_checksums_ep aws_c_io_ep) - add_dependencies(AWS::aws-c-event-stream aws_c_event_stream_ep) - - externalproject_add(aws_c_sdkutils_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_SDKUTILS_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_SDKUTILS_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_SDKUTILS_STATIC_LIBRARY} - DEPENDS aws_c_common_ep) - add_dependencies(AWS::aws-c-sdkutils aws_c_sdkutils_ep) - - externalproject_add(aws_c_compression_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_COMPRESSION_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_COMPRESSION_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_COMPRESSION_STATIC_LIBRARY} - DEPENDS aws_c_common_ep) - add_dependencies(AWS::aws-c-compression aws_c_compression_ep) - - externalproject_add(aws_c_http_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_HTTP_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_HTTP_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_HTTP_STATIC_LIBRARY} - DEPENDS aws_c_io_ep aws_c_compression_ep) - add_dependencies(AWS::aws-c-http aws_c_http_ep) - - externalproject_add(aws_c_mqtt_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_MQTT_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_MQTT_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_MQTT_STATIC_LIBRARY} - DEPENDS aws_c_http_ep) - add_dependencies(AWS::aws-c-mqtt aws_c_mqtt_ep) - - externalproject_add(aws_c_auth_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_AUTH_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_AUTH_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_AUTH_STATIC_LIBRARY} - DEPENDS aws_c_sdkutils_ep aws_c_cal_ep aws_c_http_ep) - add_dependencies(AWS::aws-c-auth aws_c_auth_ep) - - externalproject_add(aws_c_s3_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_C_S3_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_C_S3_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_COMMON_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_C_S3_STATIC_LIBRARY} - DEPENDS aws_checksums_ep aws_c_auth_ep) - add_dependencies(AWS::aws-c-s3 aws_c_s3_ep) - - externalproject_add(aws_crt_cpp_ep - ${EP_COMMON_OPTIONS} - URL ${AWS_CRT_CPP_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWS_CRT_CPP_BUILD_SHA256_CHECKSUM}" - CMAKE_ARGS ${AWSSDK_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_CRT_CPP_STATIC_LIBRARY} - DEPENDS aws_c_auth_ep - aws_c_cal_ep - aws_c_common_ep - aws_c_event_stream_ep - aws_c_http_ep - 
aws_c_io_ep - aws_c_mqtt_ep - aws_c_s3_ep - aws_checksums_ep) - add_dependencies(AWS::aws-crt-cpp aws_crt_cpp_ep) - - externalproject_add(awssdk_ep - ${EP_COMMON_OPTIONS} - URL ${AWSSDK_SOURCE_URL} - URL_HASH "SHA256=${ARROW_AWSSDK_BUILD_SHA256_CHECKSUM}" - PATCH_COMMAND ${AWSSDK_PATCH_COMMAND} - CMAKE_ARGS ${AWSSDK_CMAKE_ARGS} - BUILD_BYPRODUCTS ${AWS_CPP_SDK_COGNITO_IDENTITY_STATIC_LIBRARY} - ${AWS_CPP_SDK_CORE_STATIC_LIBRARY} - ${AWS_CPP_SDK_IDENTITY_MANAGEMENT_STATIC_LIBRARY} - ${AWS_CPP_SDK_S3_STATIC_LIBRARY} - ${AWS_CPP_SDK_STS_STATIC_LIBRARY} - DEPENDS aws_crt_cpp_ep) - foreach(_AWSSDK_LIB ${_AWSSDK_LIBS}) - if(${_AWSSDK_LIB} MATCHES "^aws-cpp-sdk-") - add_dependencies(${_AWSSDK_LIB} awssdk_ep) + # For winhttp.h + # + # See also: + # https://learn.microsoft.com/en-us/windows/win32/winhttp/error-messages + string(APPEND CMAKE_CXX_FLAGS " -DERROR_WINHTTP_UNHANDLED_SCRIPT_TYPE=12176") + string(APPEND CMAKE_CXX_FLAGS " -DERROR_WINHTTP_SCRIPT_EXECUTION_ERROR=12177") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/winhttp/ns-winhttp-winhttp_async_result + string(APPEND CMAKE_CXX_FLAGS " -DAPI_GET_PROXY_FOR_URL=6") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/api/winhttp/nc-winhttp-winhttp_status_callback + string(APPEND CMAKE_CXX_FLAGS " -DWINHTTP_CALLBACK_STATUS_CLOSE_COMPLETE=0x02000000") + string(APPEND CMAKE_CXX_FLAGS + " -DWINHTTP_CALLBACK_STATUS_SHUTDOWN_COMPLETE=0x04000000") + # See also: + # https://learn.microsoft.com/en-us/windows/win32/winhttp/option-flags + string(APPEND CMAKE_CXX_FLAGS " -DWINHTTP_FLAG_SECURE_PROTOCOL_TLS1_2=0x00000800") + string(APPEND CMAKE_CXX_FLAGS " -DWINHTTP_NO_CLIENT_CERT_CONTEXT=0") + endif() + + set(AWSSDK_LINK_LIBRARIES) + foreach(AWSSDK_PRODUCT ${AWSSDK_PRODUCTS}) + if("${AWSSDK_PRODUCT}" STREQUAL "s2n-tls") + # Use aws-lc's openssl/*.h not openssl/*.h in system. + set(ADDITIONAL_FLAGS "-DCOMPILE_DEFINITIONS=-I${aws-lc_SOURCE_DIR}/include") endif() - endforeach() - - set(AWSSDK_VENDORED TRUE) - list(APPEND ARROW_BUNDLED_STATIC_LIBS ${AWSSDK_LIBRARIES}) - set(AWSSDK_LINK_LIBRARIES ${AWSSDK_LIBRARIES}) - if(UNIX) - # on Linux and macOS curl seems to be required - set_property(TARGET aws-cpp-sdk-core - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES CURL::libcurl) - set_property(TARGET AWS::aws-c-cal - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES OpenSSL::Crypto OpenSSL::SSL) - if(APPLE) - set_property(TARGET AWS::aws-c-cal - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES "-framework Security") + fetchcontent_makeavailable(${AWSSDK_PRODUCT}) + if(CMAKE_VERSION VERSION_LESS 3.28) + set_property(DIRECTORY ${${AWSSDK_PRODUCT}_SOURCE_DIR} PROPERTY EXCLUDE_FROM_ALL + TRUE) endif() - if(ZLIB_VENDORED) - set_property(TARGET aws-cpp-sdk-core - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES ZLIB::ZLIB) - add_dependencies(awssdk_ep zlib_ep) + list(PREPEND CMAKE_MODULE_PATH "${${AWSSDK_PRODUCT}_SOURCE_DIR}/cmake") + if(NOT "${AWSSDK_PRODUCT}" STREQUAL "aws-sdk-cpp") + if("${AWSSDK_PRODUCT}" STREQUAL "aws-lc") + # We don't need to link aws-lc. It's used only by s2n-tls. + elseif("${AWSSDK_PRODUCT}" STREQUAL "s2n-tls") + list(PREPEND AWSSDK_LINK_LIBRARIES s2n) + else() + list(PREPEND AWSSDK_LINK_LIBRARIES ${AWSSDK_PRODUCT}) + # This is for find_package(aws-*) in aws-crt-cpp and aws-sdk-cpp. 
+ add_library(AWS::${AWSSDK_PRODUCT} ALIAS ${AWSSDK_PRODUCT}) + endif() endif() - set_property(TARGET AWS::aws-c-io - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES ${CMAKE_DL_LIBS}) - elseif(WIN32) - set_property(TARGET aws-cpp-sdk-core - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES - "winhttp.lib" - "bcrypt.lib" - "wininet.lib" - "userenv.lib" - "version.lib") - set_property(TARGET AWS::aws-c-cal - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES - "bcrypt.lib" - "ncrypt.lib" - "Secur32.lib" - "Shlwapi.lib") - set_property(TARGET AWS::aws-c-io - APPEND - PROPERTY INTERFACE_LINK_LIBRARIES "crypt32.lib") - endif() - - # AWSSDK is static-only build -endmacro() + endforeach() + list(PREPEND + AWSSDK_LINK_LIBRARIES + aws-cpp-sdk-identity-management + aws-cpp-sdk-sts + aws-cpp-sdk-cognito-identity + aws-cpp-sdk-s3 + aws-cpp-sdk-core) + + set(AWSSDK_VENDORED + TRUE + PARENT_SCOPE) + list(APPEND ARROW_BUNDLED_STATIC_LIBS ${AWSSDK_LINK_LIBRARIES}) + set(ARROW_BUNDLED_STATIC_LIBS + ${ARROW_BUNDLED_STATIC_LIBS} + PARENT_SCOPE) + set(AWSSDK_LINK_LIBRARIES + ${AWSSDK_LINK_LIBRARIES} + PARENT_SCOPE) +endfunction() if(ARROW_S3) resolve_dependency(AWSSDK HAVE_ALT TRUE) @@ -5510,16 +5297,6 @@ if(ARROW_S3) endif() endif() endif() - - if(APPLE) - # CoreFoundation's path is hardcoded in the CMake files provided by - # aws-sdk-cpp to use the macOS SDK provided by XCode which makes - # XCode a hard dependency. Command Line Tools is often used instead - # of the full XCode suite, so let the linker to find it. - set_target_properties(AWS::aws-c-common - PROPERTIES INTERFACE_LINK_LIBRARIES - "-pthread;pthread;-framework CoreFoundation") - endif() endif() # ---------------------------------------------------------------------- diff --git a/cpp/cmake_modules/aws-c-common-1208.patch b/cpp/cmake_modules/aws-c-common-1208.patch new file mode 100644 index 00000000000..72a5f70776b --- /dev/null +++ b/cpp/cmake_modules/aws-c-common-1208.patch @@ -0,0 +1,83 @@ +# Licensed to the Apache Software Foundation (ASF) under one +# or more contributor license agreements. See the NOTICE file +# distributed with this work for additional information +# regarding copyright ownership. The ASF licenses this file +# to you under the Apache License, Version 2.0 (the +# "License"); you may not use this file except in compliance +# with the License. You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, +# software distributed under the License is distributed on an +# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY +# KIND, either express or implied. See the License for the +# specific language governing permissions and limitations +# under the License. + +From 06c435c6765833734c62ac45909b5ab1fc5276a7 Mon Sep 17 00:00:00 2001 +From: Sutou Kouhei +Date: Sat, 7 Jun 2025 20:40:36 +0900 +Subject: [PATCH] Use _WIN32 not _MSC_VER for MinGW + +This was missed in https://github.com/awslabs/aws-c-common/pull/801 +and https://github.com/awslabs/aws-c-common/pull/822 . 
+ +MSYS2 also includes this change: +https://github.com/msys2/MINGW-packages/blob/master/mingw-w64-aws-c-common/001-fix-build-on-mingw-aarch64.patch + +Error message without this: + +https://github.com/ursacomputing/crossbow/actions/runs/15502494926/job/43652596580#step:7:5884 + + cd /D/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-build && /D/a/crossbow/crossbow/sccache/sccache.exe /C/rtools40/mingw32/bin/cc.exe -DARROW_HAVE_RUNTIME_SSE4_2 -DARROW_HAVE_SSE4_2 -DARROW_WITH_TIMING_TESTS -DAWS_AFFINITY_METHOD=AWS_AFFINITY_METHOD_NONE -DCJSON_HIDE_SYMBOLS -DINTEL_NO_ITTNOTIFY_API -DPSAPI_VERSION=1 -DWINDOWS_KERNEL_LIB=kernel32 -D_CRT_SECURE_NO_WARNINGS @CMakeFiles/aws-c-common.dir/includes_C.rsp -O3 -DNDEBUG -O2 -ftree-vectorize -std=gnu99 -fvisibility=hidden -Wall -Wstrict-prototypes -pedantic -Wno-long-long -fPIC -D_FILE_OFFSET_BITS=64 -MD -MT _deps/aws-c-common-build/CMakeFiles/aws-c-common.dir/source/allocator.c.obj -MF CMakeFiles/aws-c-common.dir/source/allocator.c.obj.d -o CMakeFiles/aws-c-common.dir/source/allocator.c.obj -c /D/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/source/allocator.c + In file included from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/byte_order.h:71, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/byte_buf.h:9, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/thread.h:8, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/logging.h:11, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/source/allocator.c:8: + D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/byte_order.inl: In function 'aws_hton64': + D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/byte_order.inl:47:23: warning: implicit declaration of function 'htonl'; did you mean 'wtoll'? 
[-Wimplicit-function-declaration] + return ((uint64_t)htonl(low)) << 32 | htonl(high); + ^~~~~ + wtoll + In file included from C:/rtools40/mingw32/i686-w64-mingw32/include/windows.h:92, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/source/allocator.c:15: + C:/rtools40/mingw32/i686-w64-mingw32/include/winsock.h: At top level: + C:/rtools40/mingw32/i686-w64-mingw32/include/winsock.h:286:37: error: conflicting types for 'htonl' + WINSOCK_API_LINKAGE u_long WSAAPI htonl(u_long hostlong); + ^~~~~ + In file included from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/byte_order.h:71, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/byte_buf.h:9, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/thread.h:8, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/logging.h:11, + from D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/source/allocator.c:8: + D:/a/crossbow/crossbow/src/build-i686-cpp/_deps/aws-c-common-src/include/aws/common/byte_order.inl:47:23: note: previous implicit declaration of 'htonl' was here + return ((uint64_t)htonl(low)) << 32 | htonl(high); + ^~~~~ +--- + include/aws/common/byte_order.inl | 4 ++-- + 1 file changed, 2 insertions(+), 2 deletions(-) + +diff --git a/include/aws/common/byte_order.inl b/include/aws/common/byte_order.inl +index 1204be06a1..0abd9cb8cd 100644 +--- a/include/aws/common/byte_order.inl ++++ b/include/aws/common/byte_order.inl +@@ -13,7 +13,7 @@ + # include + #else + # include +-#endif /* _MSC_VER */ ++#endif /* _WIN32 */ + + AWS_EXTERN_C_BEGIN + +@@ -39,7 +39,7 @@ AWS_STATIC_IMPL uint64_t aws_hton64(uint64_t x) { + uint64_t v; + __asm__("bswap %q0" : "=r"(v) : "0"(x)); + return v; +-#elif defined(_MSC_VER) ++#elif defined(_WIN32) + return _byteswap_uint64(x); + #else + uint32_t low = x & UINT32_MAX; diff --git a/cpp/cmake_modules/aws_sdk_cpp_generate_variables.sh b/cpp/cmake_modules/aws_sdk_cpp_generate_variables.sh deleted file mode 100755 index 79b560a4a14..00000000000 --- a/cpp/cmake_modules/aws_sdk_cpp_generate_variables.sh +++ /dev/null @@ -1,72 +0,0 @@ -#!/bin/bash -# -# Licensed to the Apache Software Foundation (ASF) under one -# or more contributor license agreements. See the NOTICE file -# distributed with this work for additional information -# regarding copyright ownership. The ASF licenses this file -# to you under the Apache License, Version 2.0 (the -# "License"); you may not use this file except in compliance -# with the License. You may obtain a copy of the License at -# -# http://www.apache.org/licenses/LICENSE-2.0 -# -# Unless required by applicable law or agreed to in writing, -# software distributed under the License is distributed on an -# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY -# KIND, either express or implied. See the License for the -# specific language governing permissions and limitations -# under the License. - -set -eu - -version=$1 - -base_dir="$(dirname "$0")" -output="${base_dir}/AWSSDKVariables.cmake" - -cat <
${output}
-# Licensed to the Apache Software Foundation (ASF) under one
-# or more contributor license agreements. See the NOTICE file
-# distributed with this work for additional information
-# regarding copyright ownership. The ASF licenses this file
-# to you under the Apache License, Version 2.0 (the
-# "License"); you may not use this file except in compliance
-# with the License. You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing,
-# software distributed under the License is distributed on an
-# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
-# KIND, either express or implied. See the License for the
-# specific language governing permissions and limitations
-# under the License.
-
-# Generated by:
-# $ cpp/cmake_modules/aws_sdk_cpp_generate_variables.sh ${version}
-
-HEADER
-
-rm -f ${version}.tar.gz
-wget https://github.com/aws/aws-sdk-cpp/archive/${version}.tar.gz
-base_name=aws-sdk-cpp-${version}
-rm -rf ${base_name}
-tar xf ${version}.tar.gz
-
-echo "set(AWSSDK_UNUSED_DIRECTORIES" >> ${output}
-find ${base_name} -mindepth 1 -maxdepth 1 -type d | \
-  sort | \
-  grep -v cmake | \
-  grep -v toolchains | \
-  grep -v aws-cpp-sdk-cognito-identity | \
-  grep -v aws-cpp-sdk-core | \
-  grep -v aws-cpp-sdk-config | \
-  grep -v aws-cpp-sdk-s3 | \
-  grep -v aws-cpp-sdk-transfer | \
-  grep -v aws-cpp-sdk-identity-management | \
-  grep -v aws-cpp-sdk-sts | \
-  sed -E -e "s,^${base_name}/, ,g" >> ${output}
-echo ")" >> ${output}
-
-rm -rf ${base_name}
-rm -f ${version}.tar.gz
diff --git a/cpp/thirdparty/README.md b/cpp/thirdparty/README.md
index 81806810530..32b216861f1 100644
--- a/cpp/thirdparty/README.md
+++ b/cpp/thirdparty/README.md
@@ -23,3 +23,19 @@ See the "Build Dependency Management" section in the
 [C++ Developer Documentation][1].

 [1]: https://github.com/apache/arrow/blob/main/docs/source/developers/cpp/building.rst
+
+## Update versions automatically
+
+There is a convenient script that updates versions in `versions.txt` to
+the latest versions automatically. You can use it like the following:
+
+```console
+cpp/thirdparty/update.rb PRODUCT_PATTERN1 PRODUCT_PATTERN2 ...
+```
+
+For example, you can update the versions of AWS SDK for C++ related
+products with the following command line:
+
+```console
+cpp/thirdparty/update.rb "AWS*" "S2N*"
+```
diff --git a/cpp/thirdparty/update.rb b/cpp/thirdparty/update.rb
new file mode 100755
index 00000000000..77f22de04fb
--- /dev/null
+++ b/cpp/thirdparty/update.rb
@@ -0,0 +1,189 @@
+#!/usr/bin/env ruby
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing,
+# software distributed under the License is distributed on an
+# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+# KIND, either express or implied. See the License for the
+# specific language governing permissions and limitations
+# under the License.
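+#
+# Usage (see also cpp/thirdparty/README.md):
+#
+#   cpp/thirdparty/update.rb PRODUCT_PATTERN1 PRODUCT_PATTERN2 ...
+#
+# Product patterns are shell-style globs (matched with File.fnmatch?)
+# against the product names in versions.txt, e.g. "AWS*" "S2N*".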
+ +require "digest/sha2" +require "json" +require "open-uri" +require "optparse" + +option_parser = OptionParser.new +option_parser.banner = + "Usage: #{$0} [options] PRODUCT_PATTERN1 PRODUCT_PATTERN2 ..." +patterns = option_parser.parse!(ARGV) +if patterns.empty? + puts(option_parser) + exit(false) +end + +# Extract product information from the `cpp/thirdparty/versions.txt` +# content. +# +# Output: +# +# { +# "ABSL" => { +# version: "20211102.0", +# checksum: "dcf71b9cba8dc0ca9940c4b316a0c796be8fab42b070bb6b7cab62b48f0e66c4", +# url_template: "https://github.com/abseil/abseil-cpp/archive/%{version}.tar.gz" +# }, +# "AWS_C_AUTH" => { +# version: "v0.9.0", +# checksum: "aa6e98864fefb95c249c100da4ae7aed36ba13a8a91415791ec6fad20bec0427", +# url_template: "https://github.com/awslabs/aws-c-auth/archive/%{version}.tar.gz", +# }, +# ... +# } +def parse_versions_txt_content(content) + products = {} + content.each_line(chomp: true) do |line| + case line + when /\AARROW_([A-Za-z0-9_-]+)_BUILD_VERSION=(.+?)\z/ + product = Regexp.last_match[1] + version = Regexp.last_match[2] + products[product] = {version: version} + when /\AARROW_([A-Za-z0-9_-]+)_BUILD_SHA256_CHECKSUM=(.+?)\z/ + product = Regexp.last_match[1] + checksum = Regexp.last_match[2] + products[product][:checksum] = checksum + when /\A "ARROW_([A-Za-z0-9_-]+)_URL (?:\S+) (\S+)"\z/ + product = Regexp.last_match[1] + url_template = Regexp.last_match[2] + url_template.gsub!(/\${.+?}/) do |matched| + if matched.end_with?("//./_}") + "%{version_underscore}" + else + "%{version}" + end + end + products[product][:url_template] = url_template + end + end + products +end + +# Update `metadata[:version]` and `metadata[:checksum]` for +# `latest_version`. +# +# This is used by product specific `#update_product_*` such as +# `#update_product_github` and `#update_product_apache`. +def update_product_generic(product, metadata, latest_version) + version = metadata[:version] + url_template = metadata[:url_template] + url = url_template % { + version: latest_version, + version_underscore: latest_version.gsub(".", "_"), + } + $stderr.puts("Updating #{product}: #{version} -> #{latest_version}") + metadata[:version] = latest_version + URI.open(url, "rb") do |response| + metadata[:checksum] = Digest::SHA256.hexdigest(response.read) + end + $stderr.puts(" Checksum: #{metadata[:checksum]}") +end + +# Update metadata to the latest version. This is for products hosted +# on GitHub. +def update_product_github(product, metadata, repository) + version = metadata[:version] + tags_url = "https://api.github.com/repos/#{repository}/tags" + tags = URI.open(tags_url) do |response| + JSON.parse(response.read) + end + latest_tag_name = tags[0]["name"] + if latest_tag_name.start_with?("v") + if metadata[:version].start_with?("v") + latest_version = latest_tag_name + else + latest_version = latest_tag_name[1..-1] + end + else + latest_version = latest_tag_name + end + return if version == latest_version + + update_product_generic(product, metadata, latest_version) +end + +# Update metadata to the latest version. This is for products +# developed by Apache Software Foundation. 
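+#
+# The release directory listing at https://downloads.apache.org/ is
+# fetched and version numbers are extracted with a pattern built from
+# `metadata[:version_directory_template]`; the last match is treated
+# as the latest version.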
+def update_product_apache(product, metadata, project) + version = metadata[:version] + version_directory_pattern = metadata[:version_directory_template] % { + version: "(\\d+(?:\\.\\d+)+)", + } + versions = URI.open("https://downloads.apache.org/#{project}/") do |response| + response.read.scan(//).flatten + end + latest_version = versions.last + return if version == latest_version + + update_product_generic(product, metadata, latest_version) +end + +# Update one product to the latest version. +def update_product(product, metadata) + url_template = metadata[:url_template] + if url_template.nil? + $stderr.puts("#{product} isn't supported " + + "because there is no associated URL") + return + end + + case url_template + when /\Ahttps:\/\/github\.com\/((?:[^\/]+)\/(?:[^\/]+))\// + github_repository = Regexp.last_match[1] + update_product_github(product, metadata, github_repository) + when /\Ahttps:\/\/www\.apache\.org\/dyn\/closer\.lua\/ + ((?:[^\/]+))\/((?:[^\/]+))\//x + apache_project = Regexp.last_match[1] + metadata[:version_directory_template] = Regexp.last_match[2] + update_product_apache(product, metadata, apache_project) + else + $stderr.puts("TODO: #{product} isn't supported yet: #{url_template}") + end +end + +# Update `versions.txt` content with `products`. `products` must be +# the same structure as `Hash` returned by +# `#parse_versions_txt_content`. +def update_versions_txt_content!(content, products) + products.each do |product, metadata| + prefix = "ARROW_#{Regexp.escape(product)}" + content.gsub!(/^#{prefix}_BUILD_VERSION=.*$/) do + "ARROW_#{product}_BUILD_VERSION=#{metadata[:version]}" + end + content.gsub!(/^#{prefix}_BUILD_SHA256_CHECKSUM=.*?$/) do + "ARROW_#{product}_BUILD_SHA256_CHECKSUM=#{metadata[:checksum]}" + end + end +end + +versions_txt = File.join(__dir__, "versions.txt") +versions_txt_content = File.read(versions_txt) +products = parse_versions_txt_content(versions_txt_content) +patterns.each do |pattern| + target_products = products.filter do |product, _| + File.fnmatch?(pattern, product) + end + target_products.each do |product, metadata| + update_product(product, metadata) + end +end +update_versions_txt_content!(versions_txt_content, products) +File.write(versions_txt, versions_txt_content) diff --git a/cpp/thirdparty/versions.txt b/cpp/thirdparty/versions.txt index 1fc53c4d5e6..96ae7c6f035 100644 --- a/cpp/thirdparty/versions.txt +++ b/cpp/thirdparty/versions.txt @@ -25,34 +25,34 @@ ARROW_ABSL_BUILD_VERSION=20211102.0 ARROW_ABSL_BUILD_SHA256_CHECKSUM=dcf71b9cba8dc0ca9940c4b316a0c796be8fab42b070bb6b7cab62b48f0e66c4 -ARROW_AWS_C_AUTH_BUILD_VERSION=v0.6.22 -ARROW_AWS_C_AUTH_BUILD_SHA256_CHECKSUM=691a6b4418afcd3dc141351b6ad33fccd8e3ff84df0e9e045b42295d284ee14c -ARROW_AWS_C_CAL_BUILD_VERSION=v0.5.20 -ARROW_AWS_C_CAL_BUILD_SHA256_CHECKSUM=acc352359bd06f8597415c366cf4ec4f00d0b0da92d637039a73323dd55b6cd0 -ARROW_AWS_C_COMMON_BUILD_VERSION=v0.8.9 -ARROW_AWS_C_COMMON_BUILD_SHA256_CHECKSUM=2f3fbaf7c38eae5a00e2a816d09b81177f93529ae8ba1b82dc8f31407565327a -ARROW_AWS_C_COMPRESSION_BUILD_VERSION=v0.2.16 -ARROW_AWS_C_COMPRESSION_BUILD_SHA256_CHECKSUM=044b1dbbca431a07bde8255ef9ec443c300fc60d4c9408d4b862f65e496687f4 -ARROW_AWS_C_EVENT_STREAM_BUILD_VERSION=v0.2.18 -ARROW_AWS_C_EVENT_STREAM_BUILD_SHA256_CHECKSUM=310ca617f713bf664e4c7485a3d42c1fb57813abd0107e49790d107def7cde4f -ARROW_AWS_C_HTTP_BUILD_VERSION=v0.7.3 -ARROW_AWS_C_HTTP_BUILD_SHA256_CHECKSUM=07e16c6bf5eba6f0dea96b6f55eae312a7c95b736f4d2e4a210000f45d8265ae -ARROW_AWS_C_IO_BUILD_VERSION=v0.13.14 
-ARROW_AWS_C_IO_BUILD_SHA256_CHECKSUM=12b66510c3d9a4f7e9b714e9cfab2a5bf835f8b9ce2f909d20ae2a2128608c71 -ARROW_AWS_C_MQTT_BUILD_VERSION=v0.8.4 -ARROW_AWS_C_MQTT_BUILD_SHA256_CHECKSUM=232eeac63e72883d460c686a09b98cdd811d24579affac47c5c3f696f956773f -ARROW_AWS_C_S3_BUILD_VERSION=v0.2.3 -ARROW_AWS_C_S3_BUILD_SHA256_CHECKSUM=a00b3c9f319cd1c9aa2c3fa15098864df94b066dcba0deaccbb3caa952d902fe -ARROW_AWS_C_SDKUTILS_BUILD_VERSION=v0.1.6 -ARROW_AWS_C_SDKUTILS_BUILD_SHA256_CHECKSUM=8a2951344b2fb541eab1e9ca17c18a7fcbfd2aaff4cdd31d362d1fad96111b91 -ARROW_AWS_CHECKSUMS_BUILD_VERSION=v0.1.13 -ARROW_AWS_CHECKSUMS_BUILD_SHA256_CHECKSUM=0f897686f1963253c5069a0e495b85c31635ba146cd3ac38cc2ea31eaf54694d -ARROW_AWS_CRT_CPP_BUILD_VERSION=v0.18.16 -ARROW_AWS_CRT_CPP_BUILD_SHA256_CHECKSUM=9e69bc1dc4b50871d1038aa9ff6ddeb4c9b28f7d6b5e5b1b69041ccf50a13483 -ARROW_AWS_LC_BUILD_VERSION=v1.3.0 -ARROW_AWS_LC_BUILD_SHA256_CHECKSUM=ae96a3567161552744fc0cae8b4d68ed88b1ec0f3d3c98700070115356da5a37 -ARROW_AWSSDK_BUILD_VERSION=1.10.55 -ARROW_AWSSDK_BUILD_SHA256_CHECKSUM=2d552fb1a84bef4a9b65e34aa7031851ed2aef5319e02cc6e4cb735c48aa30de +ARROW_AWS_C_AUTH_BUILD_VERSION=v0.9.0 +ARROW_AWS_C_AUTH_BUILD_SHA256_CHECKSUM=aa6e98864fefb95c249c100da4ae7aed36ba13a8a91415791ec6fad20bec0427 +ARROW_AWS_C_CAL_BUILD_VERSION=v0.9.2 +ARROW_AWS_C_CAL_BUILD_SHA256_CHECKSUM=f9f3bc6a069e2efe25fcdf73e4d2b16b5608c327d2eb57c8f7a8524e9e1fcad0 +ARROW_AWS_C_COMMON_BUILD_VERSION=v0.12.3 +ARROW_AWS_C_COMMON_BUILD_SHA256_CHECKSUM=a4e7ac6c6f840cb6ab56b8ee0bcd94a61c59d68ca42570bca518432da4c94273 +ARROW_AWS_C_COMPRESSION_BUILD_VERSION=v0.3.1 +ARROW_AWS_C_COMPRESSION_BUILD_SHA256_CHECKSUM=d89fca17a37de762dc34f332d2da402343078da8dbd2224c46a11a88adddf754 +ARROW_AWS_C_EVENT_STREAM_BUILD_VERSION=v0.5.4 +ARROW_AWS_C_EVENT_STREAM_BUILD_SHA256_CHECKSUM=cef8b78e362836d15514110fb43a0a0c7a86b0a210d5fe25fd248a82027a7272 +ARROW_AWS_C_HTTP_BUILD_VERSION=v0.10.2 +ARROW_AWS_C_HTTP_BUILD_SHA256_CHECKSUM=048d9d683459ade363fd7cc448c2b6329c78f67a2a0c0cb61c16de4634a2fc6b +ARROW_AWS_C_IO_BUILD_VERSION=v0.19.1 +ARROW_AWS_C_IO_BUILD_SHA256_CHECKSUM=f2fea0c066924f7fe3c2b1c7b2fa9be640f5b16a6514854226330e63a1faacd0 +ARROW_AWS_C_MQTT_BUILD_VERSION=v0.13.1 +ARROW_AWS_C_MQTT_BUILD_SHA256_CHECKSUM=c54d02c1e46f55bae8d5e6f9c4b0d78d84c1c9d9ac16ba8d78c3361edcd8b5bb +ARROW_AWS_C_S3_BUILD_VERSION=v0.8.1 +ARROW_AWS_C_S3_BUILD_SHA256_CHECKSUM=c8b09780691d2b94e50d101c68f01fa2d1c3debb0ff3aed313d93f0d3c9af663 +ARROW_AWS_C_SDKUTILS_BUILD_VERSION=v0.2.4 +ARROW_AWS_C_SDKUTILS_BUILD_SHA256_CHECKSUM=493cbed4fa57e0d4622fcff044e11305eb4fc12445f32c8861025597939175fc +ARROW_AWS_CHECKSUMS_BUILD_VERSION=v0.2.7 +ARROW_AWS_CHECKSUMS_BUILD_SHA256_CHECKSUM=178e8398d98111f29150f7813a70c20ad97ab30be0de02525440355fe84ccb1d +ARROW_AWS_CRT_CPP_BUILD_VERSION=v0.32.8 +ARROW_AWS_CRT_CPP_BUILD_SHA256_CHECKSUM=db44260452a0296341fb8e7b987e4c328f08f7829b9f1c740fed9c963e081e93 +ARROW_AWS_LC_BUILD_VERSION=v1.52.1 +ARROW_AWS_LC_BUILD_SHA256_CHECKSUM=fe552e3c3522f73afc3c30011745c431c633f7b4e25dcd7b38325f194a7b3b75 +ARROW_AWSSDK_BUILD_VERSION=1.11.587 +ARROW_AWSSDK_BUILD_SHA256_CHECKSUM=b9944ba9905a68d6e53abb4f36ab2b3bd18ac88d8571647bb9f2b8026b76f8cd # Despite the confusing version name this is still the whole Azure SDK for C++ including core, keyvault, storage-common, etc. 
ARROW_AZURE_SDK_BUILD_VERSION=azure-identity_1.9.0
 ARROW_AZURE_SDK_BUILD_SHA256_CHECKSUM=97065bfc971ac8df450853ce805f820f52b59457bd7556510186a1569502e4a1
@@ -105,8 +105,8 @@
 ARROW_SNAPPY_BUILD_VERSION=1.2.2
 ARROW_SNAPPY_BUILD_SHA256_CHECKSUM=90f74bc1fbf78a6c56b3c4a082a05103b3a56bb17bca1a27e052ea11723292dc
 ARROW_SUBSTRAIT_BUILD_VERSION=v0.44.0
 ARROW_SUBSTRAIT_BUILD_SHA256_CHECKSUM=f989a862f694e7dbb695925ddb7c4ce06aa6c51aca945105c075139aed7e55a2
-ARROW_S2N_TLS_BUILD_VERSION=v1.3.35
-ARROW_S2N_TLS_BUILD_SHA256_CHECKSUM=9d32b26e6bfcc058d98248bf8fc231537e347395dd89cf62bb432b55c5da990d
+ARROW_S2N_TLS_BUILD_VERSION=v1.5.21
+ARROW_S2N_TLS_BUILD_SHA256_CHECKSUM=203d69d6f557f6ab303438ad186fca13fd2c60581b2cca6348a9fbee10d79995
 ARROW_THRIFT_BUILD_VERSION=0.20.0
 ARROW_THRIFT_BUILD_SHA256_CHECKSUM=b5d8311a779470e1502c027f428a1db542f5c051c8e1280ccd2163fa935ff2d6
 ARROW_UTF8PROC_BUILD_VERSION=v2.10.0
diff --git a/python/pyarrow/tests/test_fs.py b/python/pyarrow/tests/test_fs.py
index 2838372bd17..e90613a6626 100644
--- a/python/pyarrow/tests/test_fs.py
+++ b/python/pyarrow/tests/test_fs.py
@@ -2043,6 +2043,8 @@ def test_concurrent_s3fs_init():
 
 
 @pytest.mark.s3
+@pytest.mark.skip(reason="atexit(ensure_s3_finalized) will be called too late "
+                  "with bundled aws-sdk-cpp 1.11.587")
 @pytest.mark.skipif(running_on_musllinux(),
                     reason="Leaking S3ClientFinalizer causes "
                            "segfault on musl based systems")
 def test_uwsgi_integration():

From 3bc33123a8671194e15b92ca3aeb90303fa792e7 Mon Sep 17 00:00:00 2001
From: Dewey Dunnington
Date: Tue, 17 Jun 2025 03:11:24 -0500
Subject: [PATCH 62/63] GH-44500: [Python][Parquet] Map Parquet logical types
 to Arrow extension types by default (#46772)

### Rationale for this change

The Parquet C++ implementation now supports reading four logical types
(JSON, UUID, Geometry, Geography) as Arrow extension types; however, users
have to opt in to avoid losing the logical type on read.

### What changes are included in this PR?

This PR sets the default value of `arrow_extensions_enabled` to `True` (in
Python).

### Are these changes tested?

Yes, the behaviour of `arrow_extensions_enabled` was already tested (and
tests were updated to reflect the new default value).

### Are there any user-facing changes?

**This PR includes breaking changes to public APIs.** Columns with a JSON
or UUID logical type are now read as extension types rather than as string
or fixed-size binary, respectively. Python users that were relying on the
previous behaviour have to explicitly cast to the storage type or use
`read_table(..., arrow_extensions_enabled=False)` after this PR (a
C++-level sketch of the same opt-out follows the docstring hunks below):

```python
import uuid
import pyarrow as pa

json_array = pa.array(['{"k": "v"}'], pa.json_())
json_array.cast(pa.string())
#> [
#>   "{"k": "v"}"
#> ]

uuid_array = pa.array([uuid.uuid4().bytes], pa.uuid())
uuid_array.cast(pa.binary(16))
#> [
#>   746C1022AB434A97972E1707EC3EE8F4
#> ]
```

* GitHub Issue: #44500

Authored-by: Dewey Dunnington
Signed-off-by: AlenkaF
---
 python/pyarrow/parquet/core.py                  | 12 ++++++------
 python/pyarrow/tests/parquet/test_data_types.py |  7 +++++--
 2 files changed, 11 insertions(+), 8 deletions(-)

diff --git a/python/pyarrow/parquet/core.py b/python/pyarrow/parquet/core.py
index a84fd5e8b7a..e98a8133954 100644
--- a/python/pyarrow/parquet/core.py
+++ b/python/pyarrow/parquet/core.py
@@ -261,7 +261,7 @@ class ParquetFile:
         it will be parsed as an URI to determine the filesystem.
     page_checksum_verification : bool, default False
         If True, verify the checksum for each page read from the file.
- arrow_extensions_enabled : bool, default False + arrow_extensions_enabled : bool, default True If True, read Parquet logical types as Arrow extension types where possible, (e.g., read JSON as the canonical `arrow.json` extension type or UUID as the canonical `arrow.uuid` extension type). @@ -314,7 +314,7 @@ def __init__(self, source, *, metadata=None, common_metadata=None, coerce_int96_timestamp_unit=None, decryption_properties=None, thrift_string_size_limit=None, thrift_container_size_limit=None, filesystem=None, - page_checksum_verification=False, arrow_extensions_enabled=False): + page_checksum_verification=False, arrow_extensions_enabled=True): self._close_source = getattr(source, 'closed', True) @@ -1321,7 +1321,7 @@ class ParquetDataset: sufficient for most Parquet files. page_checksum_verification : bool, default False If True, verify the page checksum for each page read from the file. -arrow_extensions_enabled : bool, default False +arrow_extensions_enabled : bool, default True If True, read Parquet logical types as Arrow extension types where possible, (e.g., read JSON as the canonical `arrow.json` extension type or UUID as the canonical `arrow.uuid` extension type). @@ -1339,7 +1339,7 @@ def __init__(self, path_or_paths, filesystem=None, schema=None, *, filters=None, decryption_properties=None, thrift_string_size_limit=None, thrift_container_size_limit=None, page_checksum_verification=False, - arrow_extensions_enabled=False): + arrow_extensions_enabled=True): import pyarrow.dataset as ds # map format arguments @@ -1739,7 +1739,7 @@ def partitioning(self): sufficient for most Parquet files. page_checksum_verification : bool, default False If True, verify the checksum for each page read from the file. -arrow_extensions_enabled : bool, default False +arrow_extensions_enabled : bool, default True If True, read Parquet logical types as Arrow extension types where possible, (e.g., read JSON as the canonical `arrow.json` extension type or UUID as the canonical `arrow.uuid` extension type). 
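
As referenced in the PR description above, the same opt-out is available at
the C++ level via the Arrow reader properties. A minimal sketch, assuming
the property is named `ArrowReaderProperties::set_arrow_extensions_enabled`
to mirror the Python keyword; the file path and the `ReadWithoutExtensions`
helper are illustrative:

```cpp
#include <memory>
#include <string>

#include "arrow/api.h"
#include "arrow/io/file.h"
#include "parquet/arrow/reader.h"
#include "parquet/properties.h"

arrow::Status ReadWithoutExtensions(const std::string& path,
                                    std::shared_ptr<arrow::Table>* out) {
  ARROW_ASSIGN_OR_RAISE(auto input, arrow::io::ReadableFile::Open(path));

  // Keep JSON/UUID columns as plain string / fixed_size_binary storage
  // instead of the arrow.json / arrow.uuid extension types.
  parquet::ArrowReaderProperties props;
  props.set_arrow_extensions_enabled(false);

  parquet::arrow::FileReaderBuilder builder;
  ARROW_RETURN_NOT_OK(builder.Open(input));
  std::unique_ptr<parquet::arrow::FileReader> reader;
  ARROW_RETURN_NOT_OK(builder.properties(props)->Build(&reader));
  return reader->ReadTable(out);
}
```
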
@@ -1839,7 +1839,7 @@ def read_table(source, *, columns=None, use_threads=True,
                decryption_properties=None, thrift_string_size_limit=None,
                thrift_container_size_limit=None,
                page_checksum_verification=False,
-               arrow_extensions_enabled=False):
+               arrow_extensions_enabled=True):
 
     try:
         dataset = ParquetDataset(
diff --git a/python/pyarrow/tests/parquet/test_data_types.py b/python/pyarrow/tests/parquet/test_data_types.py
index 351221f64df..c546bc1532a 100644
--- a/python/pyarrow/tests/parquet/test_data_types.py
+++ b/python/pyarrow/tests/parquet/test_data_types.py
@@ -569,6 +569,7 @@ def test_json_extension_type(storage_type):
     _check_roundtrip(
         table,
         pa.table({"ext": pa.array(data, pa.string())}),
+        {"arrow_extensions_enabled": False},
         store_schema=False)
 
     # With arrow_extensions_enabled=True on read, we get a arrow.json back
@@ -576,7 +577,7 @@
     _check_roundtrip(
         table,
         pa.table({"ext": pa.array(data, pa.json_(pa.string()))}),
-        read_table_kwargs={"arrow_extensions_enabled": True},
+        {"arrow_extensions_enabled": True},
         store_schema=False)
 
 
@@ -594,11 +595,13 @@
     _check_roundtrip(
         table,
         pa.table({"ext": pa.array(data, pa.binary(16))}),
+        {"arrow_extensions_enabled": False},
         store_schema=False)
 
     _check_roundtrip(
         table,
         table,
-        {"arrow_extensions_enabled": True}, store_schema=False)
+        {"arrow_extensions_enabled": True},
+        store_schema=False)
 
 
 def test_undefined_logical_type(parquet_test_datadir):

From 68efc7bc8346806844f6d968248950be298174cb Mon Sep 17 00:00:00 2001
From: "Alina (Xi) Li"
Date: Mon, 16 Jun 2025 17:38:43 -0700
Subject: [PATCH 63/63] Initialize Kernel functions

As per the changes from #46261, we need to initialize the compute kernel
library explicitly to get its functions registered (a minimal standalone
sketch follows the final hunk of this patch).
---
 .../flight/sql/odbc/flight_sql/CMakeLists.txt |  2 +-
 .../sql/odbc/flight_sql/flight_sql_driver.cc  |  4 ++++
 .../flight/sql/odbc/flight_sql/utils_test.cc  | 16 ++++++++++++----
 3 files changed, 17 insertions(+), 5 deletions(-)

diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt b/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt
index e9f282e91f9..895eff8ed9a 100644
--- a/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt
+++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/CMakeLists.txt
@@ -102,7 +102,7 @@ if(WIN32)
 endif()
 
 target_link_libraries(arrow_odbc_spi_impl PUBLIC odbcabstraction arrow_flight_sql_shared
-                      Boost::locale)
+                      arrow_compute_shared Boost::locale)
 
 # Link libraries on MINGW64 only
 if(MINGW AND CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc
index cb0e5c5ae5c..0736dac8486 100644
--- a/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc
+++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/flight_sql_driver.cc
@@ -16,7 +16,9 @@
 // under the License.
#include "arrow/flight/sql/odbc/flight_sql/include/flight_sql/flight_sql_driver.h" +#include "arrow/compute/api.h" #include "arrow/flight/sql/odbc/flight_sql/flight_sql_connection.h" +#include "arrow/flight/sql/odbc/flight_sql/utils.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/platform.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/spd_logger.h" #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/utils.h" @@ -56,6 +58,8 @@ LogLevel ToLogLevel(int64_t level) { FlightSqlDriver::FlightSqlDriver() : diagnostics_("Apache Arrow", "Flight SQL", OdbcVersion::V_3), version_("0.9.0.0") { RegisterLog(); + // Register Kernel functions to library + ThrowIfNotOK(arrow::compute::Initialize()); } std::shared_ptr FlightSqlDriver::CreateConnection(OdbcVersion odbc_version) { diff --git a/cpp/src/arrow/flight/sql/odbc/flight_sql/utils_test.cc b/cpp/src/arrow/flight/sql/odbc/flight_sql/utils_test.cc index 1575bf09fab..f5d61da50bf 100644 --- a/cpp/src/arrow/flight/sql/odbc/flight_sql/utils_test.cc +++ b/cpp/src/arrow/flight/sql/odbc/flight_sql/utils_test.cc @@ -19,6 +19,7 @@ #include "arrow/flight/sql/odbc/odbcabstraction/include/odbcabstraction/calendar_utils.h" +#include "arrow/compute/initialize.h" #include "arrow/testing/builder.h" #include "arrow/testing/gtest_util.h" #include "arrow/testing/util.h" @@ -27,6 +28,13 @@ namespace driver { namespace flight_sql { +class UtilTestsWithCompute : public ::testing::Test { + public: + // This must be done before using the compute kernels in order to + // register them to the FunctionRegistry. + void SetUp() override { ASSERT_OK(arrow::compute::Initialize()); } +}; + void AssertConvertedArray(const std::shared_ptr& expected_array, const std::shared_ptr& converted_array, uint64_t size, arrow::Type::type arrow_type) { @@ -80,7 +88,7 @@ void TestTime64ArrayConversion(const std::vector& input, AssertConvertedArray(expected_array, converted_array, input.size(), arrow_type); } -TEST(Utils, Time32ToTimeStampArray) { +TEST_F(UtilTestsWithCompute, Time32ToTimeStampArray) { std::vector input_data = {14896, 17820}; const auto seconds_from_epoch = odbcabstraction::GetTodayTimeFromEpoch(); @@ -100,7 +108,7 @@ TEST(Utils, Time32ToTimeStampArray) { arrow::Type::TIMESTAMP); } -TEST(Utils, Time64ToTimeStampArray) { +TEST_F(UtilTestsWithCompute, Time64ToTimeStampArray) { std::vector input_data = {1579489200000, 1646881200000}; const auto seconds_from_epoch = odbcabstraction::GetTodayTimeFromEpoch(); @@ -120,7 +128,7 @@ TEST(Utils, Time64ToTimeStampArray) { arrow::Type::TIMESTAMP); } -TEST(Utils, StringToDateArray) { +TEST_F(UtilTestsWithCompute, StringToDateArray) { std::shared_ptr expected; arrow::ArrayFromVector({1579489200000, 1646881200000}, &expected); @@ -129,7 +137,7 @@ TEST(Utils, StringToDateArray) { odbcabstraction::CDataType_DATE, arrow::Type::DATE64); } -TEST(Utils, StringToTimeArray) { +TEST_F(UtilTestsWithCompute, StringToTimeArray) { std::shared_ptr expected; arrow::ArrayFromVector( time64(arrow::TimeUnit::MICRO), {36000000000, 43200000000}, &expected);