diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..bb3f296 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,11 @@ +# Set default behaviour to automatically normalize line endings. +* text=auto + +# Force batch scripts to always use CRLF line endings so that if a repo is +# accessed in Windows via a file share from Linux, the scripts will work. +*.[cC][mM][dD] text eol=crlf +*.[bB][aA][tT] text eol=crlf + +# Force bash scripts to always use LF line endings so that if a repo is +# accessed in Unix via a file share from Windows, the scripts will work. +*.sh text eol=lf diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f21835..76a4e1a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,14 +15,20 @@ jobs: env: OS: ${{ matrix.os }} PYTHON: ${{ matrix.python-version }} + PYTHONIOENCODING: utf-8 PIP_DOWNLOAD_CACHE: ${{ github.workspace }}/../.pip_download_cache strategy: - fail-fast: true + fail-fast: false matrix: - os: [ubuntu-20.04] + os: [ubuntu-20.04, macos-latest, windows-latest] python-version: [3.6, 3.7, 3.8, 3.9] steps: + - name: Set git crlf/eol + run: | + git config --global core.autocrlf false + git config --global core.eol lf + - uses: actions/checkout@v2 with: fetch-depth: 0 @@ -31,12 +37,28 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Add requirements + - name: Add pip requirements run: | python -m pip install --upgrade pip pip install tox tox-gh-actions - - name: apt helper action + - name: Install macos deps with brew + if: runner.os == 'macOS' + run: | + brew install ninja + + - name: Prepare compiler environment for ${{ matrix.os }} + if: runner.os == 'Windows' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: x64 + + - name: Set have package true for ${{ matrix.os }} + if: runner.os == 'Linux' + run: | + echo "HAVE_LIBDATRIE_PKG=TRUE" >> $GITHUB_ENV + + - name: Install deps with apt helper action + if: runner.os == 'Linux' + uses:
ryankurte/action-apt@v0.2.0 with: @@ -45,6 +67,8 @@ jobs: packages: libdatrie-dev pybind11-dev ninja-build - name: Test in place + # windows does not like build_ext -i or removing previous build + if: runner.os != 'Windows' run: | tox -e py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..5892b9f --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,143 @@ +name: Release + +on: + push: + # release on tag push + tags: + - '*' + +jobs: + cibw_wheels: + name: Build wheels on ${{ matrix.os }} for Python + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, macos-latest, windows-latest] + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: '3.8' + + - name: Prepare compiler environment for Windows + if: runner.os == 'Windows' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: amd64 + + - name: Install cibuildwheel + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==1.7.1 + + - name: Build wheels + env: + CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/manylinux2010_x86_64:latest + CIBW_MANYLINUX_I686_IMAGE: quay.io/pypa/manylinux2010_i686:latest + CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* + CIBW_SKIP: "*-win32" + CIBW_BEFORE_ALL_LINUX: > + yum -y -q --enablerepo=extras install epel-release + && yum install -y ninja-build + CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}" + CIBW_BEFORE_ALL_MACOS: > + brew install pybind11 ninja + CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.9 + CIBW_REPAIR_WHEEL_COMMAND_MACOS: "pip uninstall -y delocate && pip install git+https://github.com/Chia-Network/delocate.git && delocate-listdeps {wheel} && delocate-wheel -w {dest_dir} -v {wheel}" + CIBW_TEST_COMMAND: python -c "import datrie" + run: | + python -m cibuildwheel --output-dir wheelhouse + + - uses:
actions/upload-artifact@v2 + with: + path: ./wheelhouse/*.whl + + sdist: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: '3.7' + + - name: Build sdist + run: | + pip install pep517 + python -m pep517.build -s . + + - uses: actions/upload-artifact@v2 + with: + path: dist/*.tar.gz + + create_release: + needs: [cibw_wheels, sdist] + runs-on: ubuntu-20.04 + + steps: + - name: Get version + id: get_version + run: | + echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV + echo ${{ env.VERSION }} + + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: 3.7 + + # download all artifacts to project dir + - uses: actions/download-artifact@v2 + + - name: Install gitchangelog + run: | + python -m pip install https://github.com/freepn/gitchangelog/archive/3.0.5.tar.gz + + - name: Generate changes file + run: | + export GITCHANGELOG_CONFIG_FILENAME=$(get-rcpath) + bash -c 'gitchangelog $(git tag --sort=taggerdate | tail -n2 | head -n1)..${{ env.VERSION }} > CHANGES.md' + + - name: Create draft release + id: create_release + uses: softprops/action-gh-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ env.VERSION }} + name: Release v${{ env.VERSION }} + body_path: CHANGES.md + draft: true + prerelease: true + # uncomment below to upload wheels to github releases + files: artifact/datrie*.whl + + #upload_pypi: + #needs: [cibw_wheels, sdist] + #runs-on: ubuntu-latest + ## upload to PyPI on every tag starting with 'v' + #if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + ## alternatively, to publish when a GitHub Release is created, use the following rule: + ## if: github.event_name == 'release' && github.event.action == 'published' + #steps: + #- uses: actions/download-artifact@v2 + #with: + #name:
artifact + #path: dist + + #- uses: pypa/gh-action-pypi-publish@master + #with: + #user: __token__ + #password: ${{ secrets.pypi_password }} + diff --git a/.github/workflows/vs_env.bat b/.github/workflows/vs_env.bat deleted file mode 100644 index 81bf72f..0000000 --- a/.github/workflows/vs_env.bat +++ /dev/null @@ -1,10 +0,0 @@ -@echo off - -SET VSWHERE="C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere" - -:: See https://github.com/microsoft/vswhere/wiki/Find-VC -for /f "usebackq delims=*" %%i in (`%VSWHERE% -latest -property installationPath`) do ( - call "%%i\VC\Auxiliary\Build\vcvarsall.bat" %* -) - -bash -c "export -p > env.sh" diff --git a/.github/workflows/wheel-check.sh b/.github/workflows/wheel-check.sh new file mode 100755 index 0000000..dfe15b1 --- /dev/null +++ b/.github/workflows/wheel-check.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +EXPECTED_WHEEL_COUNT=$1 + +if ! [ -n $EXPECTED_WHEEL_COUNT ]; then + exit 0 +fi + +WHEELS=$(find . -maxdepth 3 -name \*.whl) +if [ $(echo $WHEELS | wc -w) -ne $EXPECTED_WHEEL_COUNT ]; then + echo "Error: Expected $EXPECTED_WHEEL_COUNT wheels" + exit 1 +else + exit 0 +fi diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 0000000..fe7d296 --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,92 @@ +name: Wheels + +on: + workflow_dispatch: + pull_request: + push: + branches: + - master + +jobs: + cibw_wheels: + name: Build wheels on ${{ matrix.os }} for Python + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, macos-latest, windows-latest] + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: '3.8' + + - name: Prepare compiler environment for Windows + if: runner.os == 'Windows' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: amd64 + + - name: Install cibuildwheel + run: | + python -m pip install --upgrade pip + python 
-m pip install cibuildwheel==1.7.1 + + - name: Build wheels + env: + CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/manylinux2010_x86_64:latest + CIBW_MANYLINUX_I686_IMAGE: quay.io/pypa/manylinux2010_i686:latest + CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* + CIBW_SKIP: "*-win32" + CIBW_BEFORE_ALL_LINUX: > + yum -y -q --enablerepo=extras install epel-release + && yum install -y ninja-build + CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}" + CIBW_BEFORE_ALL_MACOS: > + brew install pybind11 ninja + CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.9 + CIBW_REPAIR_WHEEL_COMMAND_MACOS: "pip uninstall -y delocate && pip install git+https://github.com/Chia-Network/delocate.git && delocate-listdeps {wheel} && delocate-wheel -w {dest_dir} -v {wheel}" + CIBW_TEST_COMMAND: python -c "import datrie" + run: | + python -m cibuildwheel --output-dir wheelhouse + + - uses: actions/upload-artifact@v2 + with: + name: wheels + path: ./wheelhouse/*.whl + + check_artifacts: + name: Check artifacts are correct + needs: [cibw_wheels] + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - uses: actions/download-artifact@v2 + with: + name: wheels + + - name: Check number of downloaded artifacts + run: .github/workflows/wheel-check.sh 24 + + #upload_pypi: + #needs: [cibw_wheels, sdist] + #runs-on: ubuntu-latest + ## upload to PyPI on every tag starting with 'v' + #if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + ## alternatively, to publish when a GitHub Release is created, use the following rule: + ## if: github.event_name == 'release' && github.event.action == 'published' + #steps: + #- uses: actions/download-artifact@v2 + #with: + #name: artifact + #path: dist + + #- uses: pypa/gh-action-pypi-publish@master + #with: + #user: __token__ + #password: ${{ secrets.pypi_password }} + diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 3986f13..0000000 --- a/.gitmodules +++ /dev/null @@
-1,3 +0,0 @@ -[submodule "libdatrie"] - path = libdatrie - url = https://github.com/tlwg/libdatrie.git diff --git a/CMakeLists.txt b/CMakeLists.txt index ced2ce4..18e2530 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,15 +1,47 @@ cmake_minimum_required(VERSION 3.15...3.18) -project(datrie LANGUAGES C CXX) - option(PY_DEBUG "Set if python being linked is a Py_DEBUG build" OFF) +option(USE_LIBDATRIE_PKG "Use OS-provided libdatrie package") +if(DEFINED ENV{HAVE_LIBDATRIE_PKG}) + set(USE_LIBDATRIE_PKG "$ENV{HAVE_LIBDATRIE_PKG}") +endif() if(NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING "Default build type: RelWithDebInfo" FORCE) endif() +project(datrie LANGUAGES C CXX) + include(GNUInstallDirs) +include(CheckIncludeFile) +include(CheckIncludeFileCXX) +include(CheckIncludeFiles) + +list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules) + +if(USE_LIBDATRIE_PKG) + find_package(Datrie) +endif() + +if(USE_LIBDATRIE_PKG AND NOT Datrie_FOUND) + find_package(PkgConfig) + pkg_check_modules(DATRIE datrie-0.2 IMPORTED_TARGET) +endif() + +if(NOT USE_LIBDATRIE_PKG) + message(STATUS "Fetching libdatrie from github") + # Fetch libdatrie + include(FetchContent) + + FetchContent_Declare( + libdatrie + GIT_REPOSITORY https://github.com/tlwg/libdatrie + GIT_TAG v0.2.13 + ) + FetchContent_MakeAvailable(libdatrie) + # this gets us the package source directory +endif() find_package(pybind11 CONFIG) @@ -30,13 +62,9 @@ endif() find_package(Threads REQUIRED) -if (${PYTHON_IS_DEBUG}) +if(${PYTHON_IS_DEBUG}) set(PY_DEBUG ON) endif() -set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} - ${PROJECT_SOURCE_DIR}/cmake/) - include_directories(${PROJECT_SOURCE_DIR}/src) - add_subdirectory(src) diff --git a/MANIFEST.in b/MANIFEST.in index 23ef592..4a82ba2 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,13 +1,9 @@ global-include CMakeLists.txt *.cmake include README.rst CHANGES.rst COPYING -include tox.ini -include tox-bench.ini -include update_c.sh +graft 
src +graft tests +include tox.ini tox-bench.ini update_c.sh include bench/words100k.txt.zip -recursive-include tests *.py - -include src/datrie.pyx -include src/cdatrie.pxd -include src/stdio_ext.pxd exclude src/datrie.c +exclude src/*.html global-exclude *.py[cod] __pycache__ diff --git a/cmake/FindCython.cmake b/cmake/modules/FindCython.cmake similarity index 100% rename from cmake/FindCython.cmake rename to cmake/modules/FindCython.cmake diff --git a/cmake/modules/FindDatrie.cmake b/cmake/modules/FindDatrie.cmake new file mode 100644 index 0000000..23167c9 --- /dev/null +++ b/cmake/modules/FindDatrie.cmake @@ -0,0 +1,53 @@ +# This module finds headers and libdatrie library. +# Results are reported in variables: +# Datrie_FOUND - True if headers and library were found +# Datrie_INCLUDE_DIRS - libdatrie include directories +# Datrie_LIBRARIES - libdatrie library to be linked + +find_path(Datrie_INCLUDE_DIR + NAMES datrie/triedefs.h + HINTS + ENV VCPKG_ROOT + PATH_SUFFIXES include include/datrie + PATHS + ~/Library/Frameworks + /Library/Frameworks + /opt/local + /opt + /usr + /usr/local/ +) + +find_library(Datrie_LIBRARY + NAMES datrie libdatrie + HINTS + ENV VCPKG_ROOT + PATH_SUFFIXES lib lib64 lib32 + PATHS + ~/Library/Frameworks + /Library/Frameworks + /opt/local + /opt + /usr + /usr/local/ +) + +mark_as_advanced(Datrie_INCLUDE_DIR Datrie_LIBRARY) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(Datrie + REQUIRED_VARS Datrie_LIBRARY Datrie_INCLUDE_DIR) + +if(Datrie_FOUND) + # need if _FOUND guard to allow project to autobuild; can't overwrite imported target even if bad + set(Datrie_INCLUDE_DIRS ${Datrie_INCLUDE_DIR}) + set(Datrie_LIBRARIES ${Datrie_LIBRARY}) + + if(NOT TARGET Datrie::Datrie) + add_library(Datrie::Datrie INTERFACE IMPORTED) + set_target_properties(Datrie::Datrie PROPERTIES + INTERFACE_LINK_LIBRARIES "${Datrie_LIBRARIES}" + INTERFACE_INCLUDE_DIRECTORIES "${Datrie_INCLUDE_DIR}" + ) + endif() +endif(Datrie_FOUND) diff 
--git a/libdatrie b/libdatrie deleted file mode 160000 index d1db08a..0000000 --- a/libdatrie +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d1db08ac1c76f54ba23d63665437473788c999f3 diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 8b044b8..eeed7eb 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1,16 +1,15 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON) +set(cython_module datrie) find_package(Cython REQUIRED) -set(cython_module datrie) - set(datrie_include_dir "${PROJECT_SOURCE_DIR}/src") set(cython_output "${CMAKE_CURRENT_SOURCE_DIR}/${cython_module}.c") set(cython_src ${cython_module}.pyx) # Track cython sources file(GLOB cy_srcs *.pyx *.pxd) -# .pyx -> .cpp +# .pyx -> .c add_custom_command(OUTPUT ${cython_output} COMMAND ${CYTHON_EXECUTABLE} -a -2 @@ -21,6 +20,16 @@ add_custom_command(OUTPUT ${cython_output} DEPENDS ${cy_srcs} COMMENT "Cythonizing extension ${cython_src}") +if(NOT USE_LIBDATRIE_PKG) + # use the locally cloned source from FetchContent + set(DATRIE_INCLUDE_DIR "${libdatrie_SOURCE_DIR}") + file(GLOB_RECURSE DATRIE_SOURCES + LIST_DIRECTORIES true + "${libdatrie_SOURCE_DIR}/datrie/*.c") + list(APPEND cython_output ${DATRIE_SOURCES}) + include_directories(${DATRIE_INCLUDE_DIR}) +endif() + add_library(${cython_module} MODULE ${cython_output}) set_target_properties(${cython_module} @@ -32,33 +41,12 @@ target_include_directories(${cython_module} PUBLIC target_compile_definitions(${cython_module} PRIVATE VERSION_INFO=${SCM_VERSION_INFO}) -# here we get to jump through some hoops to find libdatrie on the manylinux -# docker CI images, etc -find_package(datrie CONFIG NAMES datrie) - -if(datrie_FOUND) - message(STATUS "System datrie found") - target_link_libraries(${cython_module} PRIVATE datrie) -elseif(NOT MSVC) - message(STATUS "Trying PkgConfig") - find_package(PkgConfig REQUIRED) - pkg_check_modules(DATRIE datrie-0.2 REQUIRED IMPORTED_TARGET) - - if(DATRIE_FOUND) - include_directories(${DATRIE_INCLUDE_DIRS}) - 
target_link_libraries(${cython_module} PRIVATE PkgConfig::DATRIE) - else() - # last resort for manylinux: just try it - message(STATUS "Blindly groping instead") - link_directories("/usr/lib64" "/usr/lib") - target_link_libraries(${cython_module} PRIVATE "libdatrie.so") - endif() -else() - # even though we used vcpkg, we get to do the manual dance with windows - find_path(DATRIE_INCLUDE_DIRS datrie/triedefs.h) - find_library(DATRIE_LIBS NAMES datrie libdatrie) - target_include_directories(${cython_module} PUBLIC ${DATRIE_INCLUDE_DIRS}) - target_link_libraries(${cython_module} PRIVATE ${DATRIE_LIBS}) +if(Datrie_FOUND) + include_directories(${DATRIE_INCLUDE_DIRS}) + target_link_libraries(${cython_module} PRIVATE Datrie::Datrie) +elseif(DATRIE_FOUND) + include_directories(${DATRIE_INCLUDE_DIRS}) + target_link_libraries(${cython_module} PRIVATE PkgConfig::DATRIE) endif() if(APPLE) diff --git a/tox.ini b/tox.ini index 7230669..c0e0424 100644 --- a/tox.ini +++ b/tox.ini @@ -18,6 +18,8 @@ passenv = CMAKE_BUILD_OVERRIDE CMAKE_TOOLCHAIN_FILE CMAKE_GENERATOR + HAVE_LIBDATRIE_PKG + PYTHONIOENCODING PIP_DOWNLOAD_CACHE setenv = @@ -31,9 +33,9 @@ deps = hypothesis commands = - python -c "import path; path.Path('build').rmtree_p()" python setup.py build_ext --inplace python -m pytest [] + python -c "import path; path.Path('build').rmtree_p()" [testenv:dev] skip_install = true @@ -45,6 +47,8 @@ passenv = CMAKE_BUILD_OVERRIDE CMAKE_TOOLCHAIN_FILE CMAKE_GENERATOR + HAVE_LIBDATRIE_PKG + PYTHONIOENCODING PIP_DOWNLOAD_CACHE deps = @@ -66,18 +70,16 @@ passenv = CMAKE_BUILD_OVERRIDE CMAKE_TOOLCHAIN_FILE CMAKE_GENERATOR + HAVE_LIBDATRIE_PKG + PYTHONIOENCODING PIP_DOWNLOAD_CACHE -allowlist_externals = bash - deps = pip>=20.0.1 pep517 twine - path commands = - python -c "import path; path.Path('build').rmtree_p()" python -m pep517.build . twine check dist/* @@ -85,6 +87,8 @@ commands = skip_install = true passenv = CI + PYTHONIOENCODING + PIP_DOWNLOAD_CACHE deps = pip>=20.0.1