diff --git a/.github/dependabot.yml b/.github/dependabot.yml index c2b051d92..ce9e973a2 100644 --- a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -11,30 +11,3 @@ updates: interval: daily time: "02:00" open-pull-requests-limit: 10 - ignore: - - dependency-name: actix - versions: - - 0.11.0 - - 0.11.1 - - dependency-name: docopt - versions: - - 1.1.1 - - dependency-name: actix-rt - versions: - - 2.0.0 - - 2.0.2 - - 2.1.0 - - 2.2.0 - - dependency-name: postgres - versions: - - 0.19.1 - - dependency-name: serde - versions: - - 1.0.124 - - 1.0.125 - - dependency-name: env_logger - versions: - - 0.8.3 - - dependency-name: criterion - versions: - - 0.3.4 diff --git a/.github/templates/homebrew.martin.rb.j2 b/.github/templates/homebrew.martin.rb.j2 new file mode 100644 index 000000000..fcf4c6fe1 --- /dev/null +++ b/.github/templates/homebrew.martin.rb.j2 @@ -0,0 +1,52 @@ +# +# ATTENTION: This is an autogenerated file. See original at +# https://github.com/maplibre/martin/blob/main/.github/templates/homebrew.martin.rb.j2 +# + +class Martin < Formula + current_version="{{ version }}" + + desc "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support, plus an mbtiles tool" + homepage "https://github.com/maplibre/martin" + + on_macos do + on_arm do + sha256 "{{ macos_arm_sha256 }}}" + url "https://github.com/maplibre/martin/releases/download/v#{current_version}/martin-Darwin-aarch64.tar.gz" + end + on_intel do + sha256 "{{ macos_intel_sha256 }}" + url "https://github.com/maplibre/martin/releases/download/v#{current_version}/martin-Darwin-x86_64.tar.gz" + end + end + + on_linux do + on_arm do + sha256 "{{ linux_arm_sha256 }}" + url "https://github.com/maplibre/martin/releases/download/v#{current_version}/martin-Linux-aarch64-musl.tar.gz" + end + on_intel do + sha256 "{{ linux_intel_sha256 }}" + url "https://github.com/maplibre/martin/releases/download/v#{current_version}/martin-Linux-x86_64-musl.tar.gz" + end + end + + version 
"#{current_version}" + + def install + bin.install "martin" + bin.install "mbtiles" + end + + def caveats; <<~EOS + Martin requires a database connection string. + It can be passed as a command-line argument or as a DATABASE_URL environment variable. + martin postgres://postgres@localhost/db + EOS + end + + test do + `#{bin}/martin --version` + `#{bin}/mbtiles --version` + end +end diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 87d40d114..1fad55133 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -1,20 +1,18 @@ name: Benchmark on: - pull_request: - types: [ opened, synchronize, reopened ] - paths-ignore: - - '**.md' - - 'demo/**' - - 'docs/**' - - 'homebrew-formula/**' - push: - branches: [ main ] - paths-ignore: - - '**.md' - - 'demo/**' - - 'docs/**' - - 'homebrew-formula/**' +# push: +# branches: [ main ] +# paths-ignore: +# - '**.md' +# - 'demo/**' +# - 'docs/**' +# pull_request: +# types: [ opened, synchronize, reopened ] +# paths-ignore: +# - '**.md' +# - 'demo/**' +# - 'docs/**' workflow_dispatch: jobs: diff --git a/.github/workflows/build-deploy-docs.yml b/.github/workflows/build-deploy-docs.yml index 0a07b310d..81d0713ed 100644 --- a/.github/workflows/build-deploy-docs.yml +++ b/.github/workflows/build-deploy-docs.yml @@ -13,6 +13,7 @@ jobs: group: ${{ github.workflow }}-${{ github.ref }} steps: - uses: actions/checkout@v4 + - uses: Swatinem/rust-cache@v2 - name: Setup mdBook uses: peaceiris/actions-mdbook@v1 @@ -23,7 +24,7 @@ jobs: - name: Deploy uses: peaceiris/actions-gh-pages@v3 - if: ${{ github.ref == 'refs/heads/main' }} + if: github.ref == 'refs/heads/main' with: github_token: ${{ secrets.GITHUB_TOKEN }} publish_dir: ./target/book diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index fa5bd6473..3188b51d0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -3,18 +3,12 @@ name: CI on: push: branches: [ main ] - paths-ignore: - - '**.md' - - 'demo/**' - - 
'docs/**' - - 'homebrew-formula/**' pull_request: branches: [ main ] paths-ignore: - '**.md' - 'demo/**' - 'docs/**' - - 'homebrew-formula/**' release: types: [ published ] workflow_dispatch: @@ -24,6 +18,220 @@ defaults: shell: bash jobs: + lint-debug-test: + name: Lint and Unit test + runs-on: ubuntu-latest + env: + PGDATABASE: test + PGHOST: localhost + PGUSER: postgres + PGPASSWORD: postgres + services: + postgres: + image: postgis/postgis:16-3.4 + ports: + # will assign a random free host port + - 5432/tcp + # Sadly there is currently no way to pass arguments to the service image other than this hack + # See also https://stackoverflow.com/a/62720566/177275 + options: >- + -e POSTGRES_DB=test + -e POSTGRES_USER=postgres + -e POSTGRES_PASSWORD=postgres + -e PGDATABASE=test + -e PGUSER=postgres + -e PGPASSWORD=postgres + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + --entrypoint sh + postgis/postgis:16-3.4 + -c "exec docker-entrypoint.sh postgres -c ssl=on -c ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem -c ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key" + steps: + - name: Checkout sources + uses: actions/checkout@v4 + - name: Rust Versions + run: rustc --version && cargo --version + - uses: Swatinem/rust-cache@v2 + if: github.event_name != 'release' && github.event_name != 'workflow_dispatch' + - run: cargo fmt --all -- --check + - run: cargo clippy --package martin-tile-utils -- -D warnings + - run: cargo clippy --package mbtiles --no-default-features -- -D warnings + - run: cargo clippy --package mbtiles -- -D warnings + - run: cargo clippy --package martin -- -D warnings + - run: cargo clippy --package martin --features bless-tests -- -D warnings + - run: cargo doc --no-deps --workspace + env: + RUSTDOCFLAGS: "-D warnings" + - name: Init database + run: tests/fixtures/initdb.sh + env: + PGPORT: ${{ job.services.postgres.ports[5432] }} + - name: Run cargo test + run: | + set -x + cargo test --package 
martin-tile-utils + cargo test --package mbtiles --no-default-features + cargo test --package mbtiles + cargo test --package martin + cargo test --doc + env: + DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=require + + docker-build-test: + name: Build and test docker images + runs-on: ubuntu-latest + env: + # PG_* variables are used by psql + PGDATABASE: test + PGHOST: localhost + PGUSER: postgres + PGPASSWORD: postgres + TARGETS: "aarch64-unknown-linux-musl x86_64-unknown-linux-musl" + # TODO: aarch64-unknown-linux-gnu + services: + postgres: + image: postgis/postgis:15-3.3 + ports: + - 5432/tcp + options: >- + -e POSTGRES_DB=test + -e POSTGRES_USER=postgres + -e POSTGRES_PASSWORD=postgres + -e PGDATABASE=test + -e PGUSER=postgres + -e PGPASSWORD=postgres + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + --health-retries 5 + --entrypoint sh + postgis/postgis:15-3.3 + -c "exec docker-entrypoint.sh postgres -c ssl=on -c ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem -c ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key" + steps: + - name: Checkout sources + uses: actions/checkout@v4 + - uses: Swatinem/rust-cache@v2 + if: github.event_name != 'release' && github.event_name != 'workflow_dispatch' + - name: Install cross + run: | + cargo install cross + # Install latest cross version from git (disabled as it is probably less stable) + # cargo install cross --git https://github.com/cross-rs/cross + cross --version + - name: Init database + run: tests/fixtures/initdb.sh + env: + PGPORT: ${{ job.services.postgres.ports[5432] }} + + - name: Set up QEMU + uses: docker/setup-qemu-action@v3 + # https://github.com/docker/setup-qemu-action + with: + platforms: linux/amd64,linux/arm64 + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@v3 + # https://github.com/docker/setup-buildx-action + with: + install: true + platforms: 
linux/amd64,linux/arm64 + + - name: Build targets + run: | + for target in $TARGETS; do + echo -e "\n----------------------------------------------" + echo "Building $target" + + export "CARGO_TARGET_$(echo $target | tr 'a-z-' 'A-Z_')_RUSTFLAGS"='-C strip=debuginfo' + cross build --release --target $target --package mbtiles + cross build --release --target $target --package martin + + mkdir -p target_releases/$target + mv target/$target/release/mbtiles target_releases/$target + mv target/$target/release/martin target_releases/$target + done + + - name: Save build artifacts to build-${{ matrix.target }} + uses: actions/upload-artifact@v3 + with: + name: cross-build + path: target_releases/* + - name: Reorganize artifacts for docker build + run: | + mkdir -p target_releases/linux/arm64 + mv target_releases/aarch64-unknown-linux-musl/* target_releases/linux/arm64/ + mkdir -p target_releases/linux/amd64 + mv target_releases/x86_64-unknown-linux-musl/* target_releases/linux/amd64/ + + - name: Build linux/arm64 Docker image + uses: docker/build-push-action@v5 + # https://github.com/docker/build-push-action + with: + context: . 
+ file: multi-platform.Dockerfile + load: true + tags: ${{ github.repository }}:linux-arm64 + platforms: linux/arm64 + - name: Test linux/arm64 Docker image + run: | + PLATFORM=linux/arm64 + TAG=${{ github.repository }}:linux-arm64 + export MBTILES_BUILD=- + export MBTILES_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests --entrypoint /usr/local/bin/mbtiles $TAG" + export MARTIN_BUILD=- + export MARTIN_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests $TAG" + tests/test.sh + env: + DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=require + + - name: Build linux/amd64 Docker image + uses: docker/build-push-action@v5 + # https://github.com/docker/build-push-action + with: + context: . + file: multi-platform.Dockerfile + load: true + tags: ${{ github.repository }}:linux-amd64 + platforms: linux/amd64 + - name: Test linux/amd64 Docker image + run: | + PLATFORM=linux/amd64 + TAG=${{ github.repository }}:linux-amd64 + export MBTILES_BUILD=- + export MBTILES_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests --entrypoint /usr/local/bin/mbtiles $TAG" + export MARTIN_BUILD=- + export MARTIN_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests $TAG" + tests/test.sh + env: + DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=require + + - name: Login to GitHub Docker registry + if: github.event_name != 'pull_request' + uses: docker/login-action@v3 + # https://github.com/docker/login-action + with: + registry: ghcr.io + username: ${{ github.actor }} + password: ${{ secrets.GITHUB_TOKEN }} + + - name: Docker meta + id: docker_meta + uses: docker/metadata-action@v5 + # https://github.com/docker/metadata-action + with: + images: 
ghcr.io/${{ github.repository }} + - name: Push the Docker image + if: github.event_name != 'pull_request' + uses: docker/build-push-action@v5 + with: + context: . + file: multi-platform.Dockerfile + push: true + tags: ${{ steps.docker_meta.outputs.tags }} + labels: ${{ steps.docker_meta.outputs.labels }} + platforms: linux/amd64,linux/arm64 + build: name: Build ${{ matrix.target }} runs-on: ${{ matrix.os }} @@ -34,9 +242,6 @@ jobs: - target: aarch64-apple-darwin os: macOS-latest cross: 'true' - - target: aarch64-unknown-linux-gnu - os: ubuntu-latest - cross: 'true' - target: debian-x86_64 os: ubuntu-latest cross: 'true' @@ -48,73 +253,40 @@ jobs: - target: x86_64-unknown-linux-gnu os: ubuntu-latest steps: - - name: Checkout + - name: Checkout sources uses: actions/checkout@v4 - name: Rust Versions run: rustc --version && cargo --version - - name: Lint (Linux) - if: matrix.target == 'x86_64-unknown-linux-gnu' - run: | - set -x - cargo fmt --all -- --check - cargo clippy --package martin-tile-utils -- -D warnings - cargo clippy --package martin-mbtiles -- -D warnings - cargo clippy --package martin-mbtiles --no-default-features --features native-tls -- -D warnings - cargo clippy --package martin-mbtiles --no-default-features --features rustls -- -D warnings - cargo clippy --package martin -- -D warnings - cargo clippy --package martin --features vendored-openssl -- -D warnings - cargo clippy --package martin --features bless-tests -- -D warnings - - name: Install OpenSSL (Windows) - if: runner.os == 'Windows' - shell: powershell - run: | - echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append - vcpkg install openssl:x64-windows-static-md - - name: Build (native) - if: matrix.cross != 'true' - run: | - cargo build --release --target ${{ matrix.target }} --features=ssl --package martin - cargo build --release --target ${{ matrix.target }} --features=cli --package martin-mbtiles - - name: Build (cross - aarch64-apple-darwin) - if: 
matrix.target == 'aarch64-apple-darwin' - run: | - rustup target add "${{ matrix.target }}" - # compile without debug symbols because stripping them with `strip` does not work cross-platform - export RUSTFLAGS='-C link-arg=-s' - cargo build --release --target ${{ matrix.target }} --features=vendored-openssl --package martin - cargo build --release --target ${{ matrix.target }} --no-default-features --features=rustls,cli --package martin-mbtiles - - name: Build (cross - aarch64-unknown-linux-gnu) - if: matrix.target == 'aarch64-unknown-linux-gnu' - run: | - sudo apt-get install -y gcc-aarch64-linux-gnu binutils-aarch64-linux-gnu - rustup target add "${{ matrix.target }}" - # compile without debug symbols because stripping them with `strip` does not work cross-platform - export RUSTFLAGS='-C link-arg=-s -C linker=aarch64-linux-gnu-gcc' - cargo build --release --target ${{ matrix.target }} --features=vendored-openssl --package martin - cargo build --release --target ${{ matrix.target }} --no-default-features --features=rustls,cli --package martin-mbtiles - - name: Build (debian package) + - uses: Swatinem/rust-cache@v2 + if: github.event_name != 'release' && github.event_name != 'workflow_dispatch' + - name: Build (.deb) if: matrix.target == 'debian-x86_64' run: | + set -x sudo apt-get install -y dpkg dpkg-dev liblzma-dev cargo install cargo-deb cargo deb -v -p martin --output target/debian/debian-x86_64.deb - - name: Move build artifacts + mkdir -p target_releases + mv target/debian/debian-x86_64.deb target_releases/ + - name: Build + if: matrix.target != 'debian-x86_64' run: | + set -x + rustup target add "${{ matrix.target }}" + export RUSTFLAGS='-C strip=debuginfo' + cargo build --release --target ${{ matrix.target }} --package mbtiles + cargo build --release --target ${{ matrix.target }} --package martin mkdir -p target_releases - if [[ "${{ matrix.target }}" == "debian-x86_64" ]]; then - mv target/debian/debian-x86_64.deb target_releases - else - mv target/${{ 
matrix.target }}/release/martin${{ matrix.ext }} target_releases - mv target/${{ matrix.target }}/release/mbtiles${{ matrix.ext }} target_releases - fi + mv target/${{ matrix.target }}/release/mbtiles${{ matrix.ext }} target_releases/ + mv target/${{ matrix.target }}/release/martin${{ matrix.ext }} target_releases/ - name: Save build artifacts to build-${{ matrix.target }} uses: actions/upload-artifact@v3 with: name: build-${{ matrix.target }} path: target_releases/* - test: - name: Test ${{ matrix.target }} + test-multi-os: + name: Test on ${{ matrix.os }} runs-on: ${{ matrix.os }} needs: [ build ] strategy: @@ -131,6 +303,8 @@ jobs: steps: - name: Checkout sources uses: actions/checkout@v4 + - uses: Swatinem/rust-cache@v2 + if: github.event_name != 'release' && github.event_name != 'workflow_dispatch' - name: Start postgres uses: nyurik/action-setup-postgis@v1 id: pg @@ -147,18 +321,6 @@ jobs: tests/fixtures/initdb.sh env: DATABASE_URL: ${{ steps.pg.outputs.connection-uri }} - - name: Unit Tests (Linux) - if: matrix.target == 'x86_64-unknown-linux-gnu' - run: | - set -x - cargo test --package martin-tile-utils - cargo test --package martin-mbtiles - cargo test --package martin-mbtiles --no-default-features --features rustls - cargo test --package martin --features vendored-openssl - cargo test --doc - cargo clean - env: - DATABASE_URL: ${{ steps.pg.outputs.connection-uri }} - name: Download build artifact build-${{ matrix.target }} uses: actions/download-artifact@v3 with: @@ -196,16 +358,18 @@ jobs: tests/test.sh env: DATABASE_URL: ${{ steps.pg.outputs.connection-uri }} - - name: Save test output on failure (Linux) - if: failure() && matrix.target == 'x86_64-unknown-linux-gnu' + - name: Save test output on failure + if: failure() uses: actions/upload-artifact@v3 with: - name: failed-test-output - path: tests/output/* + name: failed-test-output-${{ runner.os }} + path: | + tests/output/* + target/test_logs/* retention-days: 5 - test-legacy: - name: Test Legacy DB 
+ test-with-svc: + name: Test postgis:${{ matrix.img_ver }} sslmode=${{ matrix.sslmode }} runs-on: ubuntu-latest needs: [ build ] strategy: @@ -213,30 +377,35 @@ jobs: matrix: include: # These must match the versions of postgres used in the docker-compose.yml - - image: postgis/postgis:11-3.0-alpine + - img_ver: 11-3.0-alpine args: postgres sslmode: disable - - image: postgis/postgis:14-3.3-alpine + - img_ver: 14-3.3-alpine args: postgres sslmode: disable # alpine images don't support SSL, so for this we use the debian images - - image: postgis/postgis:15-3.3 + - img_ver: 15-3.3 args: postgres -c ssl=on -c ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem -c ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key sslmode: require + # + # FIXME! + # DISABLED because Rustls fails to validate name (CN?) with the NotValidForName error + #- img_ver: 15-3.3 + # args: postgres -c ssl=on -c ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem -c ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key + # sslmode: verify-ca + #- img_ver: 15-3.3 + # args: postgres -c ssl=on -c ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem -c ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key + # sslmode: verify-full env: - # PG_* variables are used by psql PGDATABASE: test PGHOST: localhost PGUSER: postgres PGPASSWORD: postgres services: postgres: - image: ${{ matrix.image }} + image: postgis/postgis:${{ matrix.img_ver }} ports: - # will assign a random free host port - 5432/tcp - # Sadly there is currently no way to pass arguments to the service image other than this hack - # See also https://stackoverflow.com/a/62720566/177275 options: >- -e POSTGRES_DB=test -e POSTGRES_USER=postgres @@ -249,15 +418,24 @@ jobs: --health-timeout 5s --health-retries 5 --entrypoint sh - ${{ matrix.image }} + postgis/postgis:${{ matrix.img_ver }} -c "exec docker-entrypoint.sh ${{ matrix.args }}" steps: - name: Checkout sources uses: actions/checkout@v4 - - name: Setup database + - uses: Swatinem/rust-cache@v2 + 
if: github.event_name != 'release' && github.event_name != 'workflow_dispatch' + - name: Init database run: tests/fixtures/initdb.sh env: PGPORT: ${{ job.services.postgres.ports[5432] }} + - name: Get DB SSL cert (sslmode=verify-*) + if: matrix.sslmode == 'verify-ca' || matrix.sslmode == 'verify-full' + run: | + set -x + mkdir -p target/certs + docker cp ${{ job.services.postgres.id }}:/etc/ssl/certs/ssl-cert-snakeoil.pem target/certs/server.crt + docker cp ${{ job.services.postgres.id }}:/etc/ssl/private/ssl-cert-snakeoil.key target/certs/server.key - name: Download build artifact build-x86_64-unknown-linux-gnu uses: actions/download-artifact@v3 with: @@ -265,6 +443,9 @@ jobs: path: target_releases/ - name: Integration Tests run: | + if [[ "${{ matrix.sslmode }}" == "verify-ca" || "${{ matrix.sslmode }}" == "verify-full" ]]; then + export PGSSLROOTCERT=target/certs/server.crt + fi export MARTIN_BUILD=- export MARTIN_BIN=target_releases/martin export MBTILES_BUILD=- @@ -282,6 +463,9 @@ jobs: - name: Tests Debian package run: | sudo dpkg -i target_releases/debian-x86_64.deb + if [[ "${{ matrix.sslmode }}" == "verify-ca" || "${{ matrix.sslmode }}" == "verify-full" ]]; then + export PGSSLROOTCERT=target/certs/server.crt + fi export MARTIN_BUILD=- export MARTIN_BIN=/usr/bin/martin export MBTILES_BUILD=- @@ -292,17 +476,13 @@ jobs: env: DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=${{ matrix.sslmode }} - name: Unit Tests + if: matrix.sslmode != 'verify-ca' && matrix.sslmode != 'verify-full' run: | echo "Running unit tests, connecting to DATABASE_URL=$DATABASE_URL" echo "Same but as base64 to prevent GitHub obfuscation (this is not a secret):" echo "$DATABASE_URL" | base64 set -x - cargo test --package martin-tile-utils - cargo test --package martin-mbtiles - cargo test --package martin-mbtiles --no-default-features --features rustls - cargo test --package martin 
--features vendored-openssl - cargo test --doc - RUSTDOCFLAGS="-D warnings" cargo doc --no-deps --workspace + cargo test --package martin cargo clean env: DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=${{ matrix.sslmode }} @@ -311,222 +491,180 @@ jobs: uses: actions/upload-artifact@v3 with: name: test-output - path: tests/output/* + path: | + tests/output/* + target/test_logs/* retention-days: 5 - docker: - name: Build docker images + package: + name: Package runs-on: ubuntu-latest - needs: [ build ] - env: - # PG_* variables are used by psql - PGDATABASE: test - PGHOST: localhost - PGUSER: postgres - PGPASSWORD: postgres - services: - postgres: - image: postgis/postgis:15-3.3 - ports: - # will assign a random free host port - - 5432/tcp - # Sadly there is currently no way to pass arguments to the service image other than this hack - # See also https://stackoverflow.com/a/62720566/177275 - options: >- - -e POSTGRES_DB=test - -e POSTGRES_USER=postgres - -e POSTGRES_PASSWORD=postgres - -e PGDATABASE=test - -e PGUSER=postgres - -e PGPASSWORD=postgres - --health-cmd pg_isready - --health-interval 10s - --health-timeout 5s - --health-retries 5 - --entrypoint sh - postgis/postgis:15-3.3 - -c "exec docker-entrypoint.sh postgres -c ssl=on -c ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem -c ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key" + needs: [ lint-debug-test, docker-build-test, test-multi-os, test-with-svc ] steps: - name: Checkout sources uses: actions/checkout@v4 - - name: Setup database - run: tests/fixtures/initdb.sh - env: - PGPORT: ${{ job.services.postgres.ports[5432] }} - - - name: Set up QEMU - uses: docker/setup-qemu-action@v3 - # https://github.com/docker/setup-qemu-action - with: - platforms: linux/amd64,linux/arm64 - - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 - # https://github.com/docker/setup-buildx-action + - name: 
Download build artifact build-aarch64-apple-darwin + uses: actions/download-artifact@v3 with: - install: true - platforms: linux/amd64,linux/arm64 - - - run: rm -rf target_releases - - name: Download build artifact build-aarch64-unknown-linux-gnu + name: build-aarch64-apple-darwin + path: target/aarch64-apple-darwin + - name: Download build artifact build-x86_64-apple-darwin uses: actions/download-artifact@v3 with: - name: build-aarch64-unknown-linux-gnu - path: target_releases/linux/arm64 + name: build-x86_64-apple-darwin + path: target/x86_64-apple-darwin - name: Download build artifact build-x86_64-unknown-linux-gnu uses: actions/download-artifact@v3 with: name: build-x86_64-unknown-linux-gnu - path: target_releases/linux/amd64 - - name: Reset permissions - run: chmod -R +x target_releases/ + path: target/x86_64-unknown-linux-gnu - - name: Build linux/arm64 Docker image - id: docker_aarch64-unknown-linux-gnu - uses: docker/build-push-action@v5 - # https://github.com/docker/build-push-action + - name: Download cross-build artifacts + uses: actions/download-artifact@v3 with: - context: . 
- file: multi-platform.Dockerfile - load: true - tags: ${{ github.repository }}:linux-arm64 - platforms: linux/arm64 - - name: Test linux/arm64 Docker image - run: | - PLATFORM=linux/arm64 - TAG=${{ github.repository }}:linux-arm64 - export MBTILES_BUILD=- - export MBTILES_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests --entrypoint /usr/local/bin/mbtiles $TAG" - export MARTIN_BUILD=- - export MARTIN_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests $TAG" - tests/test.sh - env: - DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=require + name: cross-build + path: target/cross - - name: Build linux/amd64 Docker image - id: docker_x86_64-unknown-linux-gnu - uses: docker/build-push-action@v5 - # https://github.com/docker/build-push-action + - name: Download build artifact build-x86_64-pc-windows-msvc + uses: actions/download-artifact@v3 with: - context: . 
- file: multi-platform.Dockerfile - load: true - tags: ${{ github.repository }}:linux-amd64 - platforms: linux/amd64 - - name: Test linux/amd64 Docker image + name: build-x86_64-pc-windows-msvc + path: target/x86_64-pc-windows-msvc + - name: Download build artifact build-debian-x86_64 + uses: actions/download-artifact@v3 + with: + name: build-debian-x86_64 + path: target/debian-x86_64 + + - name: Package run: | - PLATFORM=linux/amd64 - TAG=${{ github.repository }}:linux-amd64 - export MBTILES_BUILD=- - export MBTILES_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests --entrypoint /usr/local/bin/mbtiles $TAG" - export MARTIN_BUILD=- - export MARTIN_BIN="docker run --rm --net host --platform $PLATFORM -e DATABASE_URL -v $PWD/tests:/tests $TAG" - tests/test.sh - env: - DATABASE_URL: postgres://${{ env.PGUSER }}:${{ env.PGUSER }}@${{ env.PGHOST }}:${{ job.services.postgres.ports[5432] }}/${{ env.PGDATABASE }}?sslmode=require + set -x + + cd target + mkdir files + mv cross/* . - - name: Login to GitHub Docker registry - if: github.event_name != 'pull_request' - uses: docker/login-action@v3 - # https://github.com/docker/login-action - with: - registry: ghcr.io - username: ${{ github.actor }} - password: ${{ secrets.GITHUB_TOKEN }} + cd aarch64-apple-darwin + chmod +x martin mbtiles + tar czvf ../files/martin-Darwin-aarch64.tar.gz martin mbtiles + cd .. + + cd x86_64-apple-darwin + chmod +x martin mbtiles + tar czvf ../files/martin-Darwin-x86_64.tar.gz martin mbtiles + cd .. + + cd x86_64-unknown-linux-gnu + chmod +x martin mbtiles + tar czvf ../files/martin-Linux-x86_64.tar.gz martin mbtiles + cd .. + + cd aarch64-unknown-linux-musl + chmod +x martin mbtiles + tar czvf ../files/martin-Linux-aarch64-musl.tar.gz martin mbtiles + cd .. + + cd x86_64-unknown-linux-musl + chmod +x martin mbtiles + tar czvf ../files/martin-Linux-x86_64-musl.tar.gz martin mbtiles + cd .. 
+ + # + # Special case for Windows + # + cd x86_64-pc-windows-msvc + 7z a ../files/martin-Windows-x86_64.zip martin.exe mbtiles.exe + cd .. + + # + # Special case for Debian .deb package + # + cd debian-x86_64 + mv debian-x86_64.deb ../files/martin-Debian-x86_64.deb + cd .. - - name: Docker meta - id: docker_meta - uses: docker/metadata-action@v5 - # https://github.com/docker/metadata-action - with: - images: ghcr.io/${{ github.repository }} - - name: Push the Docker image - if: github.event_name != 'pull_request' - uses: docker/build-push-action@v5 - with: - context: . - file: multi-platform.Dockerfile - push: true - tags: ${{ steps.docker_meta.outputs.tags }} - labels: ${{ steps.docker_meta.outputs.labels }} - platforms: linux/amd64,linux/arm64 + - name: Create Homebrew config + run: | + set -x - package: - name: Package ${{ matrix.target }} - runs-on: ${{ matrix.os }} - needs: [ test, test-legacy ] - strategy: - fail-fast: true - matrix: - include: - - target: aarch64-apple-darwin - os: ubuntu-latest - name: martin-Darwin-aarch64.tar.gz - cross: 'true' - sha: 'true' - - target: aarch64-unknown-linux-gnu - os: ubuntu-latest - name: martin-Linux-aarch64.tar.gz - cross: 'true' - - target: x86_64-apple-darwin - os: macOS-latest - name: martin-Darwin-x86_64.tar.gz - sha: 'true' - - target: x86_64-pc-windows-msvc - os: windows-latest - name: martin-Windows-x86_64.zip - ext: '.exe' - - target: x86_64-unknown-linux-gnu - os: ubuntu-latest - name: martin-Linux-x86_64.tar.gz - - target: debian-x86_64 - os: ubuntu-latest - name: martin-Debian-x86_64.deb - cross: 'true' - steps: - - name: Checkout sources - uses: actions/checkout@v4 - - name: Download build artifact build-${{ matrix.target }} - uses: actions/download-artifact@v3 + # Extract Github release version only without the "v" prefix + MARTIN_VERSION=$(echo "${{ github.ref }}" | sed -e 's/refs\/tags\/v//') + + mkdir -p target/homebrew + cd target + + cat << EOF > homebrew_config.yaml + version: "$MARTIN_VERSION" + 
macos_arm_sha256: "$(shasum -a 256 files/martin-Darwin-aarch64.tar.gz | cut -d' ' -f1)" + macos_intel_sha256: "$(shasum -a 256 files/martin-Darwin-x86_64.tar.gz | cut -d' ' -f1)" + linux_arm_sha256: "$(shasum -a 256 files/martin-Linux-aarch64-musl.tar.gz | cut -d' ' -f1)" + linux_intel_sha256: "$(shasum -a 256 files/martin-Linux-x86_64-musl.tar.gz | cut -d' ' -f1)" + EOF + + - name: Save Homebrew Config + uses: actions/upload-artifact@v3 with: - name: build-${{ matrix.target }} - path: target/ - - name: Strip symbols - # Symbol stripping does not work cross-platform - # For cross, symbols were already removed during build - if: matrix.cross != 'true' - run: | - cd target/ - strip martin${{ matrix.ext }} - strip mbtiles${{ matrix.ext }} - - name: Package - run: | - cd target/ - if [[ "${{ runner.os }}" == "Windows" ]]; then - 7z a ../${{ matrix.name }} martin${{ matrix.ext }} mbtiles${{ matrix.ext }} - elif [[ "${{ matrix.target }}" == "debian-x86_64" ]]; then - mv debian-x86_64.deb ../${{ matrix.name }} - else - tar czvf ../${{ matrix.name }} martin${{ matrix.ext }} mbtiles${{ matrix.ext }} - fi - - name: Generate SHA-256 (MacOS) - if: matrix.sha == 'true' - run: shasum -a 256 ${{ matrix.name }} + name: homebrew-config + path: target/homebrew_config.yaml + - name: Publish if: startsWith(github.ref, 'refs/tags/') uses: softprops/action-gh-release@v1 with: draft: true - files: 'martin*' + files: 'target/files/*' body_path: CHANGELOG.md env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + - name: Checkout maplibre/homebrew-martin + if: startsWith(github.ref, 'refs/tags/') + uses: actions/checkout@v4 + with: + repository: maplibre/homebrew-martin + token: ${{ secrets.GH_HOMEBREW_MARTIN_TOKEN }} + path: target/homebrew + + - name: Create Homebrew formula + uses: ajeffowens/jinja2-action@90dab3da2215932ea86d2875224f06bbd6798617 # v2.0.0 + with: + template: .github/templates/homebrew.martin.rb.j2 + output_file: target/homebrew/martin.rb + data_file: 
target/homebrew_config.yaml + + - name: Create a PR for maplibre/homebrew-martin + if: startsWith(github.ref, 'refs/tags/') + uses: peter-evans/create-pull-request@v5 + with: + # Create a personal access token + # Gen: https://github.com/settings/personal-access-tokens/new + # Set: https://github.com/maplibre/martin/settings/secrets/actions/GH_HOMEBREW_MARTIN_TOKEN + # Docs: https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/managing-your-personal-access-tokens#creating-a-fine-grained-personal-access-token + # Name: anything descriptive + # One year long (sadly that's max) + # Repository owner and repo: maplibre/homebrew-martin + # Access Contents: Read and write + # Access Metadata: Read-only + # Access Pull requests: Read and write + token: ${{ secrets.GH_HOMEBREW_MARTIN_TOKEN }} + commit-message: "Update to ${{ github.ref }}" + title: "Update to ${{ github.ref }}" + body: "Update to ${{ github.ref }}" + branch: "update-to-${{ github.ref }}" + branch-suffix: timestamp + base: "main" + labels: "auto-update" + assignees: "nyurik" + draft: false + delete-branch: true + path: target/homebrew + # This final step is needed to mark the whole workflow as successful # Don't change its name - it is used by the merge protection rules done: name: CI Finished runs-on: ubuntu-latest - needs: [ docker, package ] + needs: [ package ] steps: - name: Finished run: echo "CI finished successfully" diff --git a/.github/workflows/dependabot.yml b/.github/workflows/dependabot.yml index 4e7dfc47f..2a6b0bc5d 100644 --- a/.github/workflows/dependabot.yml +++ b/.github/workflows/dependabot.yml @@ -6,7 +6,7 @@ permissions: write-all jobs: dependabot: runs-on: ubuntu-latest - if: ${{ github.actor == 'dependabot[bot]' }} + if: github.actor == 'dependabot[bot]' steps: - name: Dependabot metadata id: metadata @@ -14,13 +14,13 @@ jobs: with: github-token: "${{ secrets.GITHUB_TOKEN }}" - name: Approve Dependabot PRs - if: ${{steps.metadata.outputs.update-type == 
'version-update:semver-patch'}} + if: steps.metadata.outputs.update-type == 'version-update:semver-patch' run: gh pr review --approve "$PR_URL" env: PR_URL: ${{github.event.pull_request.html_url}} GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - name: Enable auto-merge for Dependabot PRs - if: ${{steps.metadata.outputs.update-type == 'version-update:semver-patch'}} + if: steps.metadata.outputs.update-type == 'version-update:semver-patch' run: gh pr merge --auto --squash "$PR_URL" env: PR_URL: ${{github.event.pull_request.html_url}} diff --git a/.github/workflows/grcov.yml b/.github/workflows/grcov.yml index 23287dc46..d0fec67cb 100644 --- a/.github/workflows/grcov.yml +++ b/.github/workflows/grcov.yml @@ -7,14 +7,12 @@ on: - '**.md' - 'demo/**' - 'docs/**' - - 'homebrew-formula/**' pull_request: branches: [ main ] paths-ignore: - '**.md' - 'demo/**' - 'docs/**' - - 'homebrew-formula/**' workflow_dispatch: jobs: @@ -41,6 +39,7 @@ jobs: steps: - name: Checkout sources uses: actions/checkout@v4 + - uses: Swatinem/rust-cache@v2 - name: Setup database run: | @@ -52,9 +51,12 @@ jobs: - name: Install nightly toolchain uses: dtolnay/rust-toolchain@master with: - toolchain: nightly + toolchain: nightly-2023-11-03 override: true + - name: Cleanup GCDA files + run: rm -rf martin/target/debug/deps/*.gcda + - name: Run tests run: cargo test env: @@ -71,9 +73,11 @@ jobs: uses: codecov/codecov-action@v3 with: file: ${{ steps.coverage.outputs.report }} + env: + CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} - name: Check conditional cfg values run: | - cargo +nightly check -Z unstable-options -Z check-cfg=features,names,values,output --workspace + cargo check -Z unstable-options -Z check-cfg --workspace env: RUSTFLAGS: '-D warnings' diff --git a/CHANGELOG.md b/CHANGELOG.md index ea88cc71b..f75f825e6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,9 @@ +# ATTENTION +This file is currently not maintained. See [release](https://github.com/maplibre/martin/releases) instead. 
+ ## [Unreleased] - ReleaseDate ### ⚠ BREAKING CHANGES diff --git a/Cargo.lock b/Cargo.lock index ab6549937..ce1e06595 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2,31 +2,6 @@ # It is not intended for manual editing. version = 3 -[[package]] -name = "actix" -version = "0.13.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cba56612922b907719d4a01cf11c8d5b458e7d3dba946d0435f20f58d6795ed2" -dependencies = [ - "actix-macros", - "actix-rt", - "actix_derive", - "bitflags 2.4.0", - "bytes", - "crossbeam-channel", - "futures-core", - "futures-sink", - "futures-task", - "futures-util", - "log", - "once_cell", - "parking_lot", - "pin-project-lite", - "smallvec", - "tokio", - "tokio-util", -] - [[package]] name = "actix-codec" version = "0.5.1" @@ -71,7 +46,7 @@ dependencies = [ "actix-utils", "ahash", "base64", - "bitflags 2.4.0", + "bitflags 2.4.1", "brotli", "bytes", "bytestring", @@ -95,7 +70,7 @@ dependencies = [ "tokio", "tokio-util", "tracing", - "zstd", + "zstd 0.12.4", ] [[package]] @@ -105,7 +80,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e01ed3140b2f8d422c68afa1ed2e85d996ea619c988ac834d255db32138655cb" dependencies = [ "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -219,18 +194,7 @@ dependencies = [ "actix-router", "proc-macro2", "quote", - "syn 2.0.37", -] - -[[package]] -name = "actix_derive" -version = "0.6.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7c7db3d5a9718568e4cf4a537cfd7070e6e6ff7481510d0237fb529ac850f6d3" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -250,21 +214,22 @@ checksum = "f26201604c87b1e01bd3d98f8d5d9a8fcbb815e8cedb41ffccbeb4bf593a35fe" [[package]] name = "ahash" -version = "0.8.3" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c99f64d1e06488f620f932677e24bc6e2897582980441ae90a671415bd7ec2f" +checksum = 
"91429305e9f0a25f6205c5b8e0d2db09e0708a7a6df0f42212bb56c32c8ac97a" dependencies = [ "cfg-if", "getrandom", "once_cell", "version_check", + "zerocopy", ] [[package]] name = "aho-corasick" -version = "1.1.1" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" +checksum = "b2969dcb958b36655471fc61f7e416fa76033bdd4bfed0678d8fee1e2d07a1f0" dependencies = [ "memchr", ] @@ -298,9 +263,9 @@ checksum = "4b46cbb362ab8752921c97e041f5e366ee6297bd428a31275b9fcf1e380f7299" [[package]] name = "anstream" -version = "0.5.0" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b1f58811cfac344940f1a400b6e6231ce35171f614f26439e80f8c1465c5cc0c" +checksum = "2ab91ebe16eb252986481c5b62f6098f3b698a45e34b5b98200cf20dd2484a44" dependencies = [ "anstyle", "anstyle-parse", @@ -312,15 +277,15 @@ dependencies = [ [[package]] name = "anstyle" -version = "1.0.3" +version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b84bf0a05bbb2a83e5eb6fa36bb6e87baa08193c35ff52bbf6b38d8af2890e46" +checksum = "7079075b41f533b8c61d2a4d073c4676e1f8b249ff94a393b0595db304e0dd87" [[package]] name = "anstyle-parse" -version = "0.2.1" +version = "0.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "938874ff5980b03a87c5524b3ae5b59cf99b1d6bc836848df7bc5ada9643c333" +checksum = "317b9a89c1868f5ea6ff1d9539a69f45dffc21ce321ac1fd1160dfa48c8e2140" dependencies = [ "utf8parse", ] @@ -331,17 +296,17 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5ca11d4be1bab0c8bc8734a9aa7bf4ee8316d462a08c6ac5052f888fef5b494b" dependencies = [ - "windows-sys", + "windows-sys 0.48.0", ] [[package]] name = "anstyle-wincon" -version = "2.1.0" +version = "3.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"58f54d10c6dfa51283a066ceab3ec1ab78d13fae00aa49243a45e4571fb79dfd" +checksum = "f0699d10d2f4d628a98ee7b57b289abbc98ff3bad977cb3152709d4bf2330628" dependencies = [ "anstyle", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -362,26 +327,11 @@ version = "0.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "96d30a06541fbafbc7f82ed10c06164cfbd2c401138f6addd8404629c4b16711" -[[package]] -name = "assert_fs" -version = "1.0.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f070617a68e5c2ed5d06ee8dd620ee18fb72b99f6c094bed34cf8ab07c875b48" -dependencies = [ - "anstyle", - "doc-comment", - "globwalk", - "predicates", - "predicates-core", - "predicates-tree", - "tempfile", -] - [[package]] name = "async-compression" -version = "0.4.3" +version = "0.4.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb42b2197bf15ccb092b62c74515dbd8b86d0effd934795f6687c93b6e679a2c" +checksum = "f658e2baef915ba0f26f1f7c42bfb8e12f532a01f449a090ded75ae7a07e9ba2" dependencies = [ "brotli", "flate2", @@ -389,8 +339,8 @@ dependencies = [ "memchr", "pin-project-lite", "tokio", - "zstd", - "zstd-safe", + "zstd 0.13.0", + "zstd-safe 7.0.0", ] [[package]] @@ -401,18 +351,18 @@ checksum = "5fd55a5ba1179988837d24ab4c7cc8ed6efdeff578ede0416b4225a5fca35bd0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] name = "async-trait" -version = "0.1.73" +version = "0.1.74" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" +checksum = "a66537f1bb974b254c98ed142ff995236e81b9d0fe4db0575f46612cb15eb0f9" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -447,9 +397,9 @@ dependencies = [ [[package]] name = "base64" -version = "0.21.4" +version = "0.21.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"9ba43ea6f343b788c8764558649e08df62f86c6ef251fdaeb1ffd010a9ae50a2" +checksum = "35636a1494ede3b646cc98f74f8e62c773a38a659ebc777a2cf26b9b74171df9" [[package]] name = "base64ct" @@ -457,6 +407,21 @@ version = "1.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b" +[[package]] +name = "bit-set" +version = "0.5.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "349f9b6a179ed607305526ca489b34ad0a41aed5f7980fa90eb03160b69598fb" + [[package]] name = "bitflags" version = "1.3.2" @@ -465,9 +430,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.4.0" +version = "2.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4682ae6287fcf752ecaabbfcc7b6f9b72aa33933dc23a554d853aea8eea8635" +checksum = "327762f6e5a765692301e5bb513e0d9fef63be86bbc14528052b1cd3e6f03e07" dependencies = [ "serde", ] @@ -495,9 +460,9 @@ dependencies = [ [[package]] name = "brotli" -version = "3.3.4" +version = "3.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a1a0b1dbcc8ae29329621f8d4f0d835787c1c38bb1401979b49d13b0b305ff68" +checksum = "516074a47ef4bce09577a3b379392300159ce5b1ba2e501ff1c819950066100f" dependencies = [ "alloc-no-stdlib", "alloc-stdlib", @@ -506,24 +471,14 @@ dependencies = [ [[package]] name = "brotli-decompressor" -version = "2.3.4" +version = "2.5.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b6561fd3f895a11e8f72af2cb7d22e08366bebc2b6b57f7744c4bda27034744" +checksum = "4e2e4afe60d7dd600fdd3de8d0f08c2b7ec039712e3b6137ff98b7004e82de4f" dependencies = [ 
"alloc-no-stdlib", "alloc-stdlib", ] -[[package]] -name = "bstr" -version = "1.6.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4c2f7349907b712260e64b0afe2f84692af14a454be26187d9df565c7f69266a" -dependencies = [ - "memchr", - "serde", -] - [[package]] name = "bumpalo" version = "3.14.0" @@ -538,9 +493,9 @@ checksum = "374d28ec25809ee0e23827c2ab573d729e293f281dfe393500e7ad618baa61c6" [[package]] name = "byteorder" -version = "1.4.3" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14c189c53d098945499cdfa7ecc63567cf3886b3332b312a5b4585d8d3a6a610" +checksum = "1fd0f2584146f6f2ef48085050886acf353beff7305ebd1ae69500e27c67f64b" [[package]] name = "bytes" @@ -550,9 +505,9 @@ checksum = "a2bd12c1caf447e69cd4528f47f94d203fd2582878ecb9e9465484c4148a8223" [[package]] name = "bytestring" -version = "1.3.0" +version = "1.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "238e4886760d98c4f899360c834fa93e62cf7f721ac3c2da375cbdf4b8679aae" +checksum = "74d80203ea6b29df88012294f62733de21cfeab47f17b41af3a38bc30a03ee72" dependencies = [ "bytes", ] @@ -614,9 +569,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.5" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "824956d0dca8334758a5b7f7e50518d66ea319330cbceedcf76905c2f6ab30e3" +checksum = "ac495e00dcec98c83465d5ad66c5c4fabd652fd6686e7c6269b117e729a6f17b" dependencies = [ "clap_builder", "clap_derive", @@ -624,9 +579,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.5" +version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "122ec64120a49b4563ccaedcbea7818d069ed8e9aa6d829b82d8a4128936b2ab" +checksum = "c77ed9a32a62e6ca27175d00d29d05ca32e396ea1eb5fb01d8256b669cec7663" dependencies = [ "anstream", "anstyle", @@ -636,21 +591,21 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.4.2" 
+version = "4.4.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0862016ff20d69b84ef8247369fabf5c008a7417002411897d40ee1f4532b873" +checksum = "cf9804afaaf59a91e75b022a30fb7229a7901f60c755489cc61c9b423b836442" dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] name = "clap_lex" -version = "0.5.1" +version = "0.6.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd7cc57abe963c6d3b9d8be5b06ba7c8957a930305ca90304f24ef040aa6f961" +checksum = "702fc72eb24e5a1e48ce58027a675bc24edd52096d5397d4aea7c6dd9eca0bd1" [[package]] name = "color_quant" @@ -664,6 +619,18 @@ version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "acbf1af155f9b9ef647e42cdc158db4b64a1b61f743629225fde6f3e0be2a7c7" +[[package]] +name = "console" +version = "0.15.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c926e00cc70edefdc64d3a5ff31cc65bb97a3460097762bd23afb4d8145fccf8" +dependencies = [ + "encode_unicode", + "lazy_static", + "libc", + "windows-sys 0.45.0", +] + [[package]] name = "const-oid" version = "0.9.5" @@ -676,6 +643,15 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6245d59a3e82a7fc217c5828a6692dbc6dfb63a0c8c90495621f7b9d79704a0e" +[[package]] +name = "convert_case" +version = "0.6.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ec182b0ca2f35d8fc196cf3404988fd8b8c739a4d270ff118a398feb0cbec1ca" +dependencies = [ + "unicode-segmentation", +] + [[package]] name = "cookie" version = "0.16.2" @@ -705,9 +681,9 @@ checksum = "e496a50fda8aacccc86d7529e2c1e0892dbd0f898a6b5645b5561b89c3210efa" [[package]] name = "cpufeatures" -version = "0.2.9" +version = "0.2.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a17b76ff3a4162b0b27f354a0c87015ddad39d35f9c0c36607a3bdd175dde1f1" +checksum = 
"ce420fe07aecd3e67c5f910618fe65e94158f6dcc0adf44e00d69ce2bdfe0fd0" dependencies = [ "libc", ] @@ -723,9 +699,9 @@ dependencies = [ [[package]] name = "crc-catalog" -version = "2.2.0" +version = "2.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cace84e55f07e7301bae1c519df89cdad8cc3cd868413d3fdbdeca9ff3db484" +checksum = "19d374276b40fb8bbdee95aef7c7fa6b5316ec764510eb64b8dd0e2ed0d7e7f5" [[package]] name = "crc32fast" @@ -850,7 +826,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "37e366bff8cd32dd8754b0991fb66b279dc48f598c3a18914852a6673deef583" dependencies = [ "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -905,9 +881,12 @@ dependencies = [ [[package]] name = "deranged" -version = "0.3.8" +version = "0.3.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f2696e8a945f658fd14dc3b87242e6b80cd0f36ff04ea560fa39082368847946" +checksum = "0f32d04922c60427da6f9fef14d042d9edddef64cb9d4ce0d64d0685fbeb1fd3" +dependencies = [ + "powerfmt", +] [[package]] name = "derive_more" @@ -915,7 +894,7 @@ version = "0.99.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321" dependencies = [ - "convert_case", + "convert_case 0.4.0", "proc-macro2", "quote", "rustc_version", @@ -923,10 +902,10 @@ dependencies = [ ] [[package]] -name = "difflib" -version = "0.4.0" +name = "diff" +version = "0.1.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6184e33543162437515c2e2b48714794e37845ec9851711914eec9d308f6ebe8" +checksum = "56254986775e3233ffa9c4d7d3faaf6d36a2c09d30b20687e9f88bc8bafc16c8" [[package]] name = "digest" @@ -940,12 +919,6 @@ dependencies = [ "subtle", ] -[[package]] -name = "doc-comment" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fea41bba32d969b513997752735605054bc0dfa92b4c56bf1189f2e174be7a10" - 
[[package]] name = "dotenvy" version = "0.15.7" @@ -961,6 +934,12 @@ dependencies = [ "serde", ] +[[package]] +name = "encode_unicode" +version = "0.3.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a357d28ed41a50f9c765dbfe56cbc04a64e53e5fc58ba79fbc34c10ef3df831f" + [[package]] name = "encoding_rs" version = "0.8.33" @@ -970,6 +949,26 @@ dependencies = [ "cfg-if", ] +[[package]] +name = "enum-display" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "96d4df33d54dd1959d177a0e2c2f4e5a8637a3054aa56861ed7e173ad2043fe2" +dependencies = [ + "enum-display-macro", +] + +[[package]] +name = "enum-display-macro" +version = "0.1.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a0ce3a36047ede676eb0d2721d065beed8410cf4f113f489604d2971331cb378" +dependencies = [ + "convert_case 0.6.0", + "quote", + "syn 1.0.109", +] + [[package]] name = "enum_dispatch" version = "0.3.12" @@ -979,7 +978,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -1003,23 +1002,12 @@ checksum = "5443807d6dff69373d433ab9ef5378ad8df50ca6298caf15de6e52e24aaf54d5" [[package]] name = "errno" -version = "0.3.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "136526188508e25c6fef639d7927dfb3e0e3084488bf202267829cf7fc23dbdd" -dependencies = [ - "errno-dragonfly", - "libc", - "windows-sys", -] - -[[package]] -name = "errno-dragonfly" -version = "0.1.2" +version = "0.3.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "aa68f1b12764fab894d2755d2518754e71b4fd80ecfb822714a1206c2aab39bf" +checksum = "ac3e13f66a2f95e32a39eaa81f6b95d42878ca0e1db0c7543723dfe12557e860" dependencies = [ - "cc", "libc", + "windows-sys 0.48.0", ] [[package]] @@ -1030,7 +1018,7 @@ checksum = "136d1b5283a1ab77bd9257427ffd09d8667ced0570b6f938942bc7568ed5b943" dependencies = [ "cfg-if", "home", - "windows-sys", + 
"windows-sys 0.48.0", ] [[package]] @@ -1039,12 +1027,6 @@ version = "2.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0206175f82b8d6bf6652ff7d71a1e27fd2e4efde587fd368662814d6ec1d9ce0" -[[package]] -name = "exitcode" -version = "1.1.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "de853764b47027c2e862a995c34978ffa63c1501f2e15f987ba11bd4f9bba193" - [[package]] name = "fallible-iterator" version = "0.2.0" @@ -1065,9 +1047,9 @@ checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "fdeflate" -version = "0.3.0" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d329bdeac514ee06249dabc27877490f17f5d371ec693360768b838e19f3ae10" +checksum = "64d6dafc854908ff5da46ff3f8f473c6984119a2876a383a860246dd7841a868" dependencies = [ "simd-adler32", ] @@ -1080,8 +1062,8 @@ checksum = "d4029edd3e734da6fe05b6cd7bd2960760a616bd2ddd0d59a0124746d6272af0" dependencies = [ "cfg-if", "libc", - "redox_syscall", - "windows-sys", + "redox_syscall 0.3.5", + "windows-sys 0.48.0", ] [[package]] @@ -1092,9 +1074,9 @@ checksum = "8fcfdc7a0362c9f4444381a9e697c79d435fe65b52a37466fc2c1184cee9edc6" [[package]] name = "flate2" -version = "1.0.27" +version = "1.0.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c6c98ee8095e9d1dcbf2fcc6d95acccb90d1c81db1e44725c6a984b1dbdfb010" +checksum = "46303f565772937ffe1d394a4fac6f411c6013172fadde9dcdb1e147a086940e" dependencies = [ "crc32fast", "miniz_oxide", @@ -1164,27 +1146,33 @@ dependencies = [ ] [[package]] -name = "foreign-types" -version = "0.3.2" +name = "form_urlencoded" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f6f339eb8adc052cd2ca78910fda869aefa38d22d5cb648e6485e4d3fc06f3b1" +checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" dependencies = [ - "foreign-types-shared", + 
"percent-encoding", ] [[package]] -name = "foreign-types-shared" -version = "0.1.1" +name = "freetype-rs" +version = "0.32.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" +checksum = "d59c337e64822dd56a3a83ed75a662a470736bdb3a9fabfb588dff276b94a4e0" +dependencies = [ + "bitflags 1.3.2", + "freetype-sys", + "libc", +] [[package]] -name = "form_urlencoded" -version = "1.2.0" +name = "freetype-sys" +version = "0.17.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a62bc1cf6f830c2ec14a513a9fb124d0a213a629668a4186f329db21fe045652" +checksum = "643148ca6cbad6bec384b52fbe1968547d578c4efe83109e035c43a71734ff88" dependencies = [ - "percent-encoding", + "cc", + "libc", ] [[package]] @@ -1196,7 +1184,7 @@ dependencies = [ "async-trait", "rustix", "tokio", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1207,9 +1195,9 @@ checksum = "e6d5a32815ae3f33302d95fdcb2ce17862f8c65363dcfd29360480ba1001fc9c" [[package]] name = "futures" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23342abe12aba583913b2e62f22225ff9c950774065e4bfb61a19cd9770fec40" +checksum = "da0290714b38af9b4a7b094b8a37086d1b4e61f2df9122c3cad2577669145335" dependencies = [ "futures-channel", "futures-core", @@ -1222,9 +1210,9 @@ dependencies = [ [[package]] name = "futures-channel" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "955518d47e09b25bbebc7a18df10b81f0c766eaf4c4f1cccef2fca5f2a4fb5f2" +checksum = "ff4dd66668b557604244583e3e1e1eada8c5c2e96a6d0d6653ede395b78bbacb" dependencies = [ "futures-core", "futures-sink", @@ -1232,15 +1220,15 @@ dependencies = [ [[package]] name = "futures-core" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"4bca583b7e26f571124fe5b7561d49cb2868d79116cfa0eefce955557c6fee8c" +checksum = "eb1d22c66e66d9d72e1758f0bd7d4fd0bee04cad842ee34587d68c07e45d088c" [[package]] name = "futures-executor" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ccecee823288125bd88b4d7f565c9e58e41858e47ab72e8ea2d64e93624386e0" +checksum = "0f4fb8693db0cf099eadcca0efe2a5a22e4550f98ed16aba6c48700da29597bc" dependencies = [ "futures-core", "futures-task", @@ -1260,38 +1248,44 @@ dependencies = [ [[package]] name = "futures-io" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4fff74096e71ed47f8e023204cfd0aa1289cd54ae5430a9523be060cdb849964" +checksum = "8bf34a163b5c4c52d0478a4d757da8fb65cabef42ba90515efee0f6f9fa45aaa" [[package]] name = "futures-macro" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" +checksum = "53b153fd91e4b0147f4aced87be237c98248656bb01050b96bf3ee89220a8ddb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] name = "futures-sink" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f43be4fe21a13b9781a69afa4985b0f6ee0e1afab2c6f454a8cf30e2b2237b6e" +checksum = "e36d3378ee38c2a36ad710c5d30c2911d752cb941c00c72dbabfb786a7970817" [[package]] name = "futures-task" -version = "0.3.28" +version = "0.3.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "efd193069b0ddadc69c46389b740bbccdd97203899b48d09c5f7969591d6bae2" + +[[package]] +name = "futures-timer" +version = "3.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d3d132be6c0e6aa1534069c705a74a5997a356c0dc2f86a47765e5617c5b65" +checksum = 
"e64b03909df88034c26dc1547e8970b91f98bdb65165d6a4e9110d94263dbb2c" [[package]] name = "futures-util" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "26b01e40b772d54cf6c6d721c1d1abd0647a0106a12ecaa1c186273392a69533" +checksum = "a19526d624e703a3179b3d322efec918b6246ea0fa51d41124525f00f1cc8104" dependencies = [ "futures-channel", "futures-core", @@ -1343,28 +1337,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "6fb8d784f27acf97159b40fc4db5ecd8aa23b9ad5ef69cdd136d3bc80665f0c0" [[package]] -name = "globset" -version = "0.4.13" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "759c97c1e17c55525b57192c06a267cda0ac5210b222d6b82189a2338fa1c13d" -dependencies = [ - "aho-corasick", - "bstr", - "fnv", - "log", - "regex", -] - -[[package]] -name = "globwalk" -version = "0.8.1" +name = "glob" +version = "0.3.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93e3af942408868f6934a7b85134a3230832b9977cf66125df2f9edcfce4ddcc" -dependencies = [ - "bitflags 1.3.2", - "ignore", - "walkdir", -] +checksum = "d2fabcfbdc87f4758337ca535fb41a6d701b65693ce38287d856d1674551ec9b" [[package]] name = "h2" @@ -1399,9 +1375,9 @@ checksum = "8a9ee70c43aaf417c914396645a0fa852624801b24ebb7ae78fe8272889ac888" [[package]] name = "hashbrown" -version = "0.14.0" +version = "0.14.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2c6201b9ff9fd90a5a3bac2e56a830d0caa509576f0e503818ee82c181b3437a" +checksum = "f93e7192158dbcda357bdec5fb5788eebf8bbac027f3f33e719d29135ae84156" dependencies = [ "ahash", "allocator-api2", @@ -1413,7 +1389,7 @@ version = "0.8.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "e8094feaf31ff591f651a2664fb9cfd92bba7a60ce3197265e9482ebe753c8f7" dependencies = [ - "hashbrown 0.14.0", + "hashbrown 0.14.2", ] [[package]] @@ -1467,7 +1443,7 @@ version = "0.5.5" source 
= "registry+https://github.com/rust-lang/crates.io-index" checksum = "5444c27eef6923071f7ebcc33e3444508466a76f7a2b93da00ed6e19f30c1ddb" dependencies = [ - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1509,23 +1485,6 @@ dependencies = [ "unicode-normalization", ] -[[package]] -name = "ignore" -version = "0.4.20" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbe7873dab538a9a44ad79ede1faf5f30d49f9a5c883ddbab48bce81b64b7492" -dependencies = [ - "globset", - "lazy_static", - "log", - "memchr", - "regex", - "same-file", - "thread_local", - "walkdir", - "winapi-util", -] - [[package]] name = "image" version = "0.24.7" @@ -1559,12 +1518,12 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.1" +version = "2.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad227c3af19d4914570ad36d30409928b75967c298feb9ea1969db3a610bb14e" +checksum = "d530e1a18b1cb4c484e6e34556a0d948706958449fca0cab753d649f2bce3d1f" dependencies = [ "equivalent", - "hashbrown 0.14.0", + "hashbrown 0.14.2", ] [[package]] @@ -1573,6 +1532,21 @@ version = "2.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1e186cfbae8084e513daff4240b4797e342f988cecda4fb6c939150f96315fd8" +[[package]] +name = "insta" +version = "1.34.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5d64600be34b2fcfc267740a243fa7744441bb4947a619ac4e5bb6507f35fbfc" +dependencies = [ + "console", + "lazy_static", + "linked-hash-map", + "serde", + "similar", + "toml", + "yaml-rust", +] + [[package]] name = "is-terminal" version = "0.4.9" @@ -1581,7 +1555,7 @@ checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi", "rustix", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -1610,9 +1584,9 @@ checksum = "af150ab688ff2122fcef229be89cb50dd66af9e01a4ff320cc137eecc9bacc38" [[package]] name = "jobserver" -version = "0.1.26" +version = 
"0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "936cfd212a0155903bcbc060e316fb6cc7cbf2e1907329391ebadc1fe0ce77c2" +checksum = "8c37f63953c4c63420ed5fd3d6d398c719489b9f872b9fa683262f8edd363c7d" dependencies = [ "libc", ] @@ -1625,18 +1599,18 @@ checksum = "bc0000e42512c92e31c2252315bda326620a4e034105e900c98ec492fa077b3e" [[package]] name = "js-sys" -version = "0.3.64" +version = "0.3.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c5f195fe497f702db0f318b07fdd68edb16955aed830df8363d837542f8f935a" +checksum = "54c0c35952f67de54bb584e9fd912b3023117cbafc0a77d8f3dee1fb5f572fe8" dependencies = [ "wasm-bindgen", ] [[package]] name = "json-patch" -version = "1.1.0" +version = "1.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4f7765dccf8c39c3a470fc694efe322969d791e713ca46bc7b5c506886157572" +checksum = "55ff1e1486799e3f64129f8ccad108b38290df9cd7015cd31bed17239f0789d6" dependencies = [ "serde", "serde_json", @@ -1670,9 +1644,9 @@ dependencies = [ [[package]] name = "libc" -version = "0.2.148" +version = "0.2.150" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9cdc71e17332e86d2e1d38c1f99edcb6288ee11b815fb1a4b049eaa2114d369b" +checksum = "89d92a4743f9a61002fae18374ed11e7973f530cb3a3255fb354818118b2203c" [[package]] name = "libdeflate-sys" @@ -1694,9 +1668,9 @@ dependencies = [ [[package]] name = "libm" -version = "0.2.7" +version = "0.2.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f7012b1bbb0719e1097c47611d3898568c546d597c2e74d66f6087edd5233ff4" +checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058" [[package]] name = "libsqlite3-sys" @@ -1709,17 +1683,22 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "linked-hash-map" +version = "0.5.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"0717cef1bc8b636c6e1c1bbdefc09e6322da8a9321966e8928ef80d20f7f770f" + [[package]] name = "linux-raw-sys" version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "da2479e8c062e40bf0066ffa0bc823de0a9368974af99c9f6df941d2c231e03f" [[package]] name = "local-channel" -version = "0.1.4" +version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e0a493488de5f18c8ffcba89eebb8532ffc562dc400490eb65b84893fae0b178" +checksum = "b6cbc85e69b8df4b8bb8b89ec634e7189099cea8927a276b7384ce5488e53ec8" dependencies = [ "futures-core", "futures-sink", @@ -1728,15 +1707,15 @@ dependencies = [ [[package]] name = "local-waker" -version = "0.1.3" +version = "0.1.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e34f76eb3611940e0e7d53a9aaa4e6a3151f69541a282fd0dad5571420c53ff1" +checksum = "4d873d7c67ce09b42110d801813efbc9364414e356be9935700d368351657487" [[package]] name = "lock_api" -version = "0.4.10" +version = "0.4.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c1cc9717a20b1bb222f333e6a92fd32f7d8a18ddc5a3191a11af45dcbf4dcd16" +checksum = "3c168f8615b12bc01f9c17e2eb0cc07dcae1940121185446edc3744920e8ef45" dependencies = [ "autocfg", "scopeguard", @@ -1750,14 +1729,14 @@ checksum = "b5e6163cb8c49088c2c36f57875e58ccd8c87c7427f7fbd50ea6710b2f3f2e8f" [[package]] name = "martin" -version = "0.9.0-pre.1" +version = "0.10.1" dependencies = [ - "actix", "actix-cors", "actix-http", "actix-rt", "actix-web", "async-trait", + "bit-set", "brotli", "cargo-husky", "clap", @@ -1768,19 +1747,22 @@ dependencies = [ "flate2", "futures", "indoc", + "insta", "itertools 0.11.0", "json-patch", "log", - "martin-mbtiles", "martin-tile-utils", + "mbtiles", "num_cpus", - "openssl", + "pbf_font_tools", "pmtiles", "postgis", "postgres", - "postgres-openssl", "postgres-protocol", "regex", + "rustls", + "rustls-native-certs", + "rustls-pemfile", "semver", "serde", "serde_json", @@ 
-1790,18 +1772,29 @@ dependencies = [ "thiserror", "tilejson", "tokio", + "tokio-postgres-rustls", ] [[package]] -name = "martin-mbtiles" -version = "0.4.0" +name = "martin-tile-utils" +version = "0.1.4" + +[[package]] +name = "mbtiles" +version = "0.7.2" dependencies = [ "actix-rt", "anyhow", "clap", + "ctor", + "enum-display", + "env_logger", "futures", + "insta", "log", "martin-tile-utils", + "pretty_assertions", + "rstest", "serde", "serde_json", "serde_yaml", @@ -1812,10 +1805,6 @@ dependencies = [ "tokio", ] -[[package]] -name = "martin-tile-utils" -version = "0.1.2" - [[package]] name = "md-5" version = "0.10.6" @@ -1828,9 +1817,9 @@ dependencies = [ [[package]] name = "memchr" -version = "2.6.3" +version = "2.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8f232d6ef707e1956a43342693d2a31e72989554d58299d7a88738cc95b0d35c" +checksum = "f665ee40bc4a3c5590afb1e9677db74a508659dfd71e126420da8274909a0167" [[package]] name = "memmap2" @@ -1883,43 +1872,25 @@ dependencies = [ [[package]] name = "mio" -version = "0.8.8" +version = "0.8.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "927a765cd3fc26206e66b296465fa9d3e5ab003e651c1b3c060e7956d96b19d2" +checksum = "3dce281c5e46beae905d4de1870d8b1509a9142b62eedf18b443b011ca8343d0" dependencies = [ "libc", "log", "wasi", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] name = "multimap" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70db9248a93dc36a36d9a47898caa007a32755c7ad140ec64eeeb50d5a730631" +checksum = "e1a5d38b9b352dbd913288736af36af41c48d61b1a8cd34bcecd727561b7d511" dependencies = [ "serde", ] -[[package]] -name = "native-tls" -version = "0.2.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07226173c32f2926027b63cce4bcd8076c3552846cbe7925f3aaffeac0a3b92e" -dependencies = [ - "lazy_static", - "libc", - "log", - "openssl", - "openssl-probe", - 
"openssl-sys", - "schannel", - "security-framework", - "security-framework-sys", - "tempfile", -] - [[package]] name = "nom" version = "7.1.3" @@ -1981,9 +1952,9 @@ dependencies = [ [[package]] name = "num-traits" -version = "0.2.16" +version = "0.2.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f30b0abd723be7e2ffca1272140fac1a2f084c77ec3e123c192b66af1ee9e6c2" +checksum = "39e3200413f237f41ab11ad6d161bc7239c84dcb631773ccd7de3dfe4b5c267c" dependencies = [ "autocfg", "libm", @@ -2020,32 +1991,6 @@ version = "11.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0ab1bc2a289d34bd04a330323ac98a1b4bc82c9d9fcb1e66b63caa84da26b575" -[[package]] -name = "openssl" -version = "0.10.57" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bac25ee399abb46215765b1cb35bc0212377e58a061560d8b29b024fd0430e7c" -dependencies = [ - "bitflags 2.4.0", - "cfg-if", - "foreign-types", - "libc", - "once_cell", - "openssl-macros", - "openssl-sys", -] - -[[package]] -name = "openssl-macros" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.37", -] - [[package]] name = "openssl-probe" version = "0.1.5" @@ -2053,32 +1998,10 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ff011a302c396a5197692431fc1948019154afc178baf7d8e37367442a4601cf" [[package]] -name = "openssl-src" -version = "300.1.5+3.1.3" +name = "oxipng" +version = "8.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "559068e4c12950d7dcaa1857a61725c0d38d4fc03ff8e070ab31a75d6e316491" -dependencies = [ - "cc", -] - -[[package]] -name = "openssl-sys" -version = "0.9.93" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "db4d56a4c0478783083cfafcc42493dd4a981d41669da64b4572a2a089b51b1d" 
-dependencies = [ - "cc", - "libc", - "openssl-src", - "pkg-config", - "vcpkg", -] - -[[package]] -name = "oxipng" -version = "8.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "630638e107fb436644c300e781d3f17e1b04656138ba0d40564be4be3b06db32" +checksum = "630638e107fb436644c300e781d3f17e1b04656138ba0d40564be4be3b06db32" dependencies = [ "bitvec", "crossbeam-channel", @@ -2107,15 +2030,15 @@ dependencies = [ [[package]] name = "parking_lot_core" -version = "0.9.8" +version = "0.9.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "93f00c865fe7cabf650081affecd3871070f26767e7b2070a3ffae14c654b447" +checksum = "4c42a9226546d68acdd9c0a280d17ce19bfe27a46bf68784e4066115788d008e" dependencies = [ "cfg-if", "libc", - "redox_syscall", + "redox_syscall 0.4.1", "smallvec", - "windows-targets", + "windows-targets 0.48.5", ] [[package]] @@ -2139,9 +2062,9 @@ dependencies = [ "proc-macro2", "quote", "regex", - "regex-syntax", + "regex-syntax 0.7.5", "structmeta", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -2150,6 +2073,22 @@ version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "de3145af08024dea9fa9914f381a17b8fc6034dfb00f3a84013f7ff43f29ed4c" +[[package]] +name = "pbf_font_tools" +version = "2.5.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "67768bb2719d708e2de28cec7271dae35c717122c0fa4d9f8558ef5e7fa83db7" +dependencies = [ + "futures", + "glob", + "protobuf", + "protobuf-codegen", + "protoc-bin-vendored", + "sdf_glyph_renderer", + "thiserror", + "tokio", +] + [[package]] name = "pem-rfc7468" version = "0.7.0" @@ -2314,19 +2253,6 @@ dependencies = [ "tokio-postgres", ] -[[package]] -name = "postgres-openssl" -version = "0.5.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1de0ea6504e07ca78355a6fb88ad0f36cafe9e696cbc6717f16a207f3a60be72" -dependencies = [ - "futures", - "openssl", - "tokio", - 
"tokio-openssl", - "tokio-postgres", -] - [[package]] name = "postgres-protocol" version = "0.6.6" @@ -2360,6 +2286,12 @@ dependencies = [ "uuid", ] +[[package]] +name = "powerfmt" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "439ee305def115ba05938db6eb1644ff94165c5ab5e9420d1c1bcedbba909391" + [[package]] name = "ppv-lite86" version = "0.2.17" @@ -2367,42 +2299,125 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5b40af805b3121feab8a3c29f04d8ad262fa8e0561883e7653e024ae4479e6de" [[package]] -name = "predicates" -version = "3.0.4" +name = "pretty_assertions" +version = "1.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "af7cee1a6c8a5b9208b3cb1061f10c0cb689087b3d8ce85fb9d2dd7a29b6ba66" +dependencies = [ + "diff", + "yansi", +] + +[[package]] +name = "proc-macro2" +version = "1.0.69" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6dfc28575c2e3f19cb3c73b93af36460ae898d426eba6fc15b9bd2a5220758a0" +checksum = "134c189feb4956b20f6f547d2cf727d4c0fe06722b20a0eec87ed445a97f92da" dependencies = [ - "anstyle", - "difflib", - "itertools 0.11.0", - "predicates-core", + "unicode-ident", ] [[package]] -name = "predicates-core" -version = "1.0.6" +name = "protobuf" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b794032607612e7abeb4db69adb4e33590fa6cf1149e95fd7cb00e634b92f174" +checksum = "b65f4a8ec18723a734e5dc09c173e0abf9690432da5340285d536edcb4dac190" +dependencies = [ + "once_cell", + "protobuf-support", + "thiserror", +] [[package]] -name = "predicates-tree" -version = "1.0.9" +name = "protobuf-codegen" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "368ba315fb8c5052ab692e68a0eefec6ec57b23a36959c14496f0b0df2c0cecf" +checksum = "6e85514a216b1c73111d9032e26cc7a5ecb1bb3d4d9539e91fb72a4395060f78" dependencies = [ - "predicates-core", - 
"termtree", + "anyhow", + "once_cell", + "protobuf", + "protobuf-parse", + "regex", + "tempfile", + "thiserror", ] [[package]] -name = "proc-macro2" -version = "1.0.67" +name = "protobuf-parse" +version = "3.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3d433d9f1a3e8c1263d9456598b16fec66f4acc9a74dacffd35c7bb09b3a1328" +checksum = "77d6fbd6697c9e531873e81cec565a85e226b99a0f10e1acc079be057fe2fcba" dependencies = [ - "unicode-ident", + "anyhow", + "indexmap 1.9.3", + "log", + "protobuf", + "protobuf-support", + "tempfile", + "thiserror", + "which", +] + +[[package]] +name = "protobuf-support" +version = "3.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6872f4d4f4b98303239a2b5838f5bbbb77b01ffc892d627957f37a22d7cfe69c" +dependencies = [ + "thiserror", +] + +[[package]] +name = "protoc-bin-vendored" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "005ca8623e5633e298ad1f917d8be0a44bcf406bf3cde3b80e63003e49a3f27d" +dependencies = [ + "protoc-bin-vendored-linux-aarch_64", + "protoc-bin-vendored-linux-ppcle_64", + "protoc-bin-vendored-linux-x86_32", + "protoc-bin-vendored-linux-x86_64", + "protoc-bin-vendored-macos-x86_64", + "protoc-bin-vendored-win32", ] +[[package]] +name = "protoc-bin-vendored-linux-aarch_64" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8fb9fc9cce84c8694b6ea01cc6296617b288b703719b725b8c9c65f7c5874435" + +[[package]] +name = "protoc-bin-vendored-linux-ppcle_64" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "02d2a07dcf7173a04d49974930ccbfb7fd4d74df30ecfc8762cf2f895a094516" + +[[package]] +name = "protoc-bin-vendored-linux-x86_32" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d54fef0b04fcacba64d1d80eed74a20356d96847da8497a59b0a0a436c9165b0" + +[[package]] +name = 
"protoc-bin-vendored-linux-x86_64" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b8782f2ce7d43a9a5c74ea4936f001e9e8442205c244f7a3d4286bd4c37bc924" + +[[package]] +name = "protoc-bin-vendored-macos-x86_64" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "b5de656c7ee83f08e0ae5b81792ccfdc1d04e7876b1d9a38e6876a9e09e02537" + +[[package]] +name = "protoc-bin-vendored-win32" +version = "3.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9653c3ed92974e34c5a6e0a510864dab979760481714c172e0a34e437cb98804" + [[package]] name = "quote" version = "1.0.33" @@ -2483,27 +2498,36 @@ dependencies = [ "bitflags 1.3.2", ] +[[package]] +name = "redox_syscall" +version = "0.4.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4722d768eff46b75989dd134e5c353f0d6296e5aaa3132e776cbdb56be7731aa" +dependencies = [ + "bitflags 1.3.2", +] + [[package]] name = "regex" -version = "1.9.5" +version = "1.10.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "697061221ea1b4a94a624f67d0ae2bfe4e22b8a17b6a192afb11046542cc8c47" +checksum = "380b951a9c5e80ddfd6136919eef32310721aa4aacd4889a8d39124b026ab343" dependencies = [ "aho-corasick", "memchr", "regex-automata", - "regex-syntax", + "regex-syntax 0.8.2", ] [[package]] name = "regex-automata" -version = "0.3.8" +version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795" +checksum = "5f804c7828047e88b2d32e2d7fe5a105da8ee3264f01902f796c8e067dc2483f" dependencies = [ "aho-corasick", "memchr", - "regex-syntax", + "regex-syntax 0.8.2", ] [[package]] @@ -2512,11 +2536,23 @@ version = "0.7.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dbb5fb1acd8a1a18b3dd5be62d25485eb770e05afb408a9627d14d451bae12da" +[[package]] +name = "regex-syntax" 
+version = "0.8.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c08c74e62047bb2de4ff487b251e4a92e24f48745648451635cec7d591162d9f" + +[[package]] +name = "relative-path" +version = "1.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c707298afce11da2efef2f600116fa93ffa7a032b5d7b628aa17711ec81383ca" + [[package]] name = "resvg" -version = "0.34.1" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0e3d65cea36eefb28a020edb6e66341764e00cd4b426e0c1f0599b1adaa78f5" +checksum = "b6554f47c38eca56827eea7f285c2a3018b4e12e0e195cc105833c008be338f1" dependencies = [ "gif", "jpeg-decoder", @@ -2531,9 +2567,9 @@ dependencies = [ [[package]] name = "rgb" -version = "0.8.36" +version = "0.8.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "20ec2d3e3fc7a92ced357df9cebd5a10b6fb2aa1ee797bf7e9ce2f17dffc8f59" +checksum = "05aaa8004b64fd573fc9d002f4e632d51ad4f026c2b5ba95fcb6c2f32c2c47d8" dependencies = [ "bytemuck", ] @@ -2548,32 +2584,44 @@ dependencies = [ "libc", "once_cell", "spin 0.5.2", - "untrusted", + "untrusted 0.7.1", "web-sys", "winapi", ] +[[package]] +name = "ring" +version = "0.17.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fb0205304757e5d899b9c2e448b867ffd03ae7f988002e47cd24954391394d0b" +dependencies = [ + "cc", + "getrandom", + "libc", + "spin 0.9.8", + "untrusted 0.9.0", + "windows-sys 0.48.0", +] + [[package]] name = "roxmltree" -version = "0.18.0" +version = "0.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d8f595a457b6b8c6cda66a48503e92ee8d19342f905948f29c383200ec9eb1d8" +checksum = "862340e351ce1b271a378ec53f304a5558f7db87f3769dc655a8f6ecbb68b302" dependencies = [ "xmlparser", ] [[package]] name = "rsa" -version = "0.9.2" +version = "0.9.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"6ab43bb47d23c1a631b4b680199a45255dce26fa9ab2fa902581f624ff13e6a8" +checksum = "86ef35bf3e7fe15a53c4ab08a998e42271eab13eb0db224126bc7bc4c4bad96d" dependencies = [ - "byteorder", "const-oid", "digest", "num-bigint-dig", "num-integer", - "num-iter", "num-traits", "pkcs1", "pkcs8", @@ -2584,13 +2632,42 @@ dependencies = [ "zeroize", ] +[[package]] +name = "rstest" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "97eeab2f3c0a199bc4be135c36c924b6590b88c377d416494288c14f2db30199" +dependencies = [ + "futures", + "futures-timer", + "rstest_macros", + "rustc_version", +] + +[[package]] +name = "rstest_macros" +version = "0.18.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d428f8247852f894ee1be110b375111b586d4fa431f6c46e64ba5a0dcccbe605" +dependencies = [ + "cfg-if", + "glob", + "proc-macro2", + "quote", + "regex", + "relative-path", + "rustc_version", + "syn 2.0.39", + "unicode-ident", +] + [[package]] name = "rusqlite" version = "0.29.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "549b9d036d571d42e6e85d1c1425e2ac83491075078ca9a15be021c56b1641f2" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "fallible-iterator", "fallible-streaming-iterator", "hashlink", @@ -2621,28 +2698,41 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.19" +version = "0.38.21" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "745ecfa778e66b2b63c88a61cb36e0eea109e803b0b86bf9879fbc77c70e86ed" +checksum = "2b426b0506e5d50a7d8dafcf2e81471400deb602392c7dd110815afb4eaf02a3" dependencies = [ - "bitflags 2.4.0", + "bitflags 2.4.1", "errno", "libc", "linux-raw-sys", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] name = "rustls" -version = "0.21.7" +version = "0.21.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd8d6c9f025a446bc4d18ad9632e69aec8f287aa84499ee335599fabd20c3fd8" +checksum = 
"446e14c5cda4f3f30fe71863c34ec70f5ac79d6087097ad0bb433e1be5edf04c" dependencies = [ - "ring", + "log", + "ring 0.17.5", "rustls-webpki", "sct", ] +[[package]] +name = "rustls-native-certs" +version = "0.6.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a9aace74cb666635c918e9c12bc0d348266037aa8eb599b5cba565709a8dff00" +dependencies = [ + "openssl-probe", + "rustls-pemfile", + "schannel", + "security-framework", +] + [[package]] name = "rustls-pemfile" version = "1.0.3" @@ -2654,12 +2744,12 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.101.6" +version = "0.101.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" +checksum = "8b6275d1ee7a1cd780b64aca7726599a1dbc893b1e64144529e55c3c2f745765" dependencies = [ - "ring", - "untrusted", + "ring 0.17.5", + "untrusted 0.9.0", ] [[package]] @@ -2699,7 +2789,7 @@ version = "0.1.22" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0c3733bf4cf7ea0880754e19cb5a462007c4a8c1914bff372ccc95b464f1df88" dependencies = [ - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -2710,12 +2800,22 @@ checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49" [[package]] name = "sct" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d53dcdb7c9f8158937a7981b48accfd39a43af418591a5d008c7b22b5e1b7ca4" +checksum = "da046153aa2352493d6cb7da4b6e5c0c057d8a1d0a9aa8560baffdd945acd414" dependencies = [ - "ring", - "untrusted", + "ring 0.17.5", + "untrusted 0.9.0", +] + +[[package]] +name = "sdf_glyph_renderer" +version = "1.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8b05c114d181e20b509e03b05856cc5823bc6189d581c276fe37c5ebc5e3b3b9" +dependencies = [ + "freetype-rs", + "thiserror", ] [[package]] @@ -2743,37 +2843,37 @@ dependencies = [ [[package]] name 
= "semver" -version = "1.0.19" +version = "1.0.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ad977052201c6de01a8ef2aa3378c4bd23217a056337d1d6da40468d267a4fb0" +checksum = "836fa6a3e1e547f9a2c4040802ec865b5d85f4014efe00555d7090a3dcaa1090" [[package]] name = "serde" -version = "1.0.188" +version = "1.0.190" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf9e0fcba69a370eed61bcf2b728575f726b50b55cba78064753d708ddc7549e" +checksum = "91d3c334ca1ee894a2c6f6ad698fe8c435b76d504b13d436f0685d648d6d96f7" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.188" +version = "1.0.190" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" +checksum = "67c5609f394e5c2bd7fc51efda478004ea80ef42fee983d5c67a65e34f32c0e3" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] name = "serde_json" -version = "1.0.107" +version = "1.0.108" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6b420ce6e3d8bd882e9b243c6eed35dbc9a6110c9769e74b584e0d68d1f20c65" +checksum = "3d1c7e3eac408d115102c4c24ad393e0821bb3a5df4d506a80f85f7a742a526b" dependencies = [ - "indexmap 2.0.1", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -2814,11 +2914,11 @@ dependencies = [ [[package]] name = "serde_yaml" -version = "0.9.25" +version = "0.9.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a49e178e4452f45cb61d0cd8cebc1b0fafd3e41929e996cef79aa3aca91f574" +checksum = "3cc7a1570e38322cfe4154732e5110f887ea57e22b76f4bfd32b5bdd3368666c" dependencies = [ - "indexmap 2.0.1", + "indexmap 2.1.0", "itoa", "ryu", "serde", @@ -2872,6 +2972,12 @@ version = "0.3.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d66dc143e6b11c1eddc06d5c423cfc97062865baf299914ab64caa38182078fe" +[[package]] +name = "similar" +version = 
"2.3.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2aeaf503862c419d66959f5d7ca015337d864e9c49485d771b732e2a20453597" + [[package]] name = "simplecss" version = "0.2.1" @@ -2913,12 +3019,12 @@ checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" [[package]] name = "socket2" -version = "0.5.4" +version = "0.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4031e820eb552adee9295814c0ced9e5cf38ddf1e8b7d566d6de8e2538ea989e" +checksum = "7b5fac59a5cb5dd637972e5fca70daf0523c9067fcdc4842f053dae04a18f8e9" dependencies = [ "libc", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -2948,18 +3054,14 @@ dependencies = [ [[package]] name = "spreet" -version = "0.8.0" +version = "0.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "357c86676a23af570a68dbc1f59d2d6442d12ac7c94c41b8317f30706f5ca05d" +checksum = "c73c2f90a7b1281c08144af5dc91f2e32fdc4752d764aa4ff95c224f7b51502c" dependencies = [ - "assert_fs", - "clap", "crunch", - "exitcode", "multimap", "oxipng", "png", - "rayon", "resvg", "serde", "serde_json", @@ -2978,15 +3080,14 @@ dependencies = [ [[package]] name = "sqlite-hashes" -version = "0.3.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f756a7c1f66e2d70c9acb5881776ba0ae25ba2aaf68e2f69ed32d96c42313fab" +checksum = "9d7ef02a3d30492f243536808bba25455404ed91aaf91309bf55c4b036e9e8da" dependencies = [ "digest", + "hex", "md-5", "rusqlite", - "sha1", - "sha2", ] [[package]] @@ -3024,15 +3125,12 @@ dependencies = [ "futures-util", "hashlink", "hex", - "indexmap 2.0.1", + "indexmap 2.1.0", "log", "memchr", - "native-tls", "once_cell", "paste", "percent-encoding", - "rustls", - "rustls-pemfile", "serde", "serde_json", "sha2", @@ -3043,7 +3141,6 @@ dependencies = [ "tokio-stream", "tracing", "url", - "webpki-roots", ] [[package]] @@ -3092,7 +3189,7 @@ checksum = 
"864b869fdf56263f4c95c45483191ea0af340f9f3e3e7b4d57a61c7c87a970db" dependencies = [ "atoi", "base64", - "bitflags 2.4.0", + "bitflags 2.4.1", "byteorder", "bytes", "crc", @@ -3134,7 +3231,7 @@ checksum = "eb7ae0e6a97fb3ba33b23ac2671a5ce6e3cabe003f451abd5a56e7951d975624" dependencies = [ "atoi", "base64", - "bitflags 2.4.0", + "bitflags 2.4.1", "byteorder", "crc", "dotenvy", @@ -3222,7 +3319,7 @@ dependencies = [ "proc-macro2", "quote", "structmeta-derive", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -3233,7 +3330,7 @@ checksum = "a60bcaff7397072dca0017d1db428e30d5002e00b6847703e2e42005c95fbe00" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] @@ -3277,9 +3374,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.37" +version = "2.0.39" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" +checksum = "23e78b90f2fcf45d3e842032ce32e3f2d1545ba6636271dcbf24fa306d87be7a" dependencies = [ "proc-macro2", "quote", @@ -3294,15 +3391,15 @@ checksum = "55937e1799185b12863d447f42597ed69d9928686b8d88a1df17376a097d8369" [[package]] name = "tempfile" -version = "3.8.0" +version = "3.8.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" +checksum = "7ef1adac450ad7f4b3c28589471ade84f25f731a7a0fe30d71dfa9f60fd808e5" dependencies = [ "cfg-if", "fastrand", - "redox_syscall", + "redox_syscall 0.4.1", "rustix", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -3314,40 +3411,24 @@ dependencies = [ "winapi-util", ] -[[package]] -name = "termtree" -version = "0.4.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3369f5ac52d5eb6ab48c6b4ffdc8efbcad6b89c765749064ba298f2c68a16a76" - [[package]] name = "thiserror" -version = "1.0.49" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" 
-checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4" +checksum = "f9a7210f5c9a7156bb50aa36aed4c95afb51df0df00713949448cf9e97d382d2" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.49" +version = "1.0.50" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" +checksum = "266b2e40bc00e5a6c09c3584011e08b06f123c00362c92b975ba9843aaaa14b8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", -] - -[[package]] -name = "thread_local" -version = "1.1.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fdd6f064ccff2d6567adcb3873ca630700f00b5ad3f060c25b5dcfd9a4ce152" -dependencies = [ - "cfg-if", - "once_cell", + "syn 2.0.39", ] [[package]] @@ -3363,12 +3444,13 @@ dependencies = [ [[package]] name = "time" -version = "0.3.29" +version = "0.3.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" +checksum = "c4a34ab300f2dee6e562c10a046fc05e358b29f9bf92277f30c3c8d82275f6f5" dependencies = [ "deranged", "itoa", + "powerfmt", "serde", "time-core", "time-macros", @@ -3442,9 +3524,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20" [[package]] name = "tokio" -version = "1.32.0" +version = "1.33.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17ed6077ed6cd6c74735e21f37eb16dc3935f96878b1fe961074089cc80893f9" +checksum = "4f38200e3ef7995e5ef13baec2f432a6da0aa9ac495b2c0e8f3b7eec2c92d653" dependencies = [ "backtrace", "bytes", @@ -3456,7 +3538,7 @@ dependencies = [ "signal-hook-registry", "socket2", "tokio-macros", - "windows-sys", + "windows-sys 0.48.0", ] [[package]] @@ -3467,19 +3549,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", -] - -[[package]] 
-name = "tokio-openssl" -version = "0.6.3" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c08f9ffb7809f1b20c1b398d92acf4cc719874b3b2b2d9ea2f09b4a80350878a" -dependencies = [ - "futures-util", - "openssl", - "openssl-sys", - "tokio", + "syn 2.0.39", ] [[package]] @@ -3508,6 +3578,30 @@ dependencies = [ "whoami", ] +[[package]] +name = "tokio-postgres-rustls" +version = "0.10.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd5831152cb0d3f79ef5523b357319ba154795d64c7078b2daa95a803b54057f" +dependencies = [ + "futures", + "ring 0.16.20", + "rustls", + "tokio", + "tokio-postgres", + "tokio-rustls", +] + +[[package]] +name = "tokio-rustls" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c28327cf380ac148141087fbfb9de9d7bd4e84ab5d2c28fbc911d753de8a7081" +dependencies = [ + "rustls", + "tokio", +] + [[package]] name = "tokio-stream" version = "0.1.14" @@ -3521,9 +3615,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" +checksum = "5419f34732d9eb6ee4c3578b7989078579b7f039cbbb9ca2c4da015749371e15" dependencies = [ "bytes", "futures-core", @@ -3533,13 +3627,21 @@ dependencies = [ "tracing", ] +[[package]] +name = "toml" +version = "0.5.11" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4f7f0dd8d50a853a531c426359045b1998f04219d88799810762cd4ad314234" +dependencies = [ + "serde", +] + [[package]] name = "tracing" -version = "0.1.37" +version = "0.1.40" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ce8c33a8d48bd45d624a6e523445fd21ec13d3653cd51f681abf67418f54eb8" +checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef" dependencies = [ - "cfg-if", "log", "pin-project-lite", "tracing-attributes", 
@@ -3548,20 +3650,20 @@ dependencies = [ [[package]] name = "tracing-attributes" -version = "0.1.26" +version = "0.1.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" +checksum = "34704c8d6ebcbc939824180af020566b01a7c01f80641264eba0999f6c2b6be7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", ] [[package]] name = "tracing-core" -version = "0.1.31" +version = "0.1.32" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0955b8137a1df6f1a2e9a37d8a6656291ff0297c1a97c24e0d8425fe2312f79a" +checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54" dependencies = [ "once_cell", ] @@ -3680,6 +3782,12 @@ version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a156c684c91ea7d62626509bce3cb4e1d9ed5c4d978f7b4352658f96a4c26b4a" +[[package]] +name = "untrusted" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" + [[package]] name = "url" version = "2.4.1" @@ -3693,9 +3801,9 @@ dependencies = [ [[package]] name = "usvg" -version = "0.34.1" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d2304b933107198a910c1f3219acb65246f2b148f862703cffd51c6e62156abe" +checksum = "14d09ddfb0d93bf84824c09336d32e42f80961a9d1680832eb24fdf249ce11e6" dependencies = [ "base64", "log", @@ -3708,9 +3816,9 @@ dependencies = [ [[package]] name = "usvg-parser" -version = "0.34.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "12b940fea80394e3b14cb21c83fa1b8f8a41023c25929bba68bb84a76193ebed" +checksum = "d19bf93d230813599927d88557014e0908ecc3531666d47c634c6838bc8db408" dependencies = [ "data-url", "flate2", @@ -3726,9 +3834,9 @@ dependencies = [ [[package]] name = "usvg-text-layout" -version = 
"0.34.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "69dfd6119f431aa7e969b4a69f9cc8b9ae37b8ae85bb26780ccfa3beaf8b71eb" +checksum = "035044604e89652c0a2959b8b356946997a52649ba6cade45928c2842376feb4" dependencies = [ "fontdb", "kurbo", @@ -3742,9 +3850,9 @@ dependencies = [ [[package]] name = "usvg-tree" -version = "0.34.0" +version = "0.35.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3185eb13b6e3d3cf1817d29612251cc308d5a7e5e6235362e67efe832435c6d9" +checksum = "7939a7e4ed21cadb5d311d6339730681c3e24c3e81d60065be80e485d3fc8b92" dependencies = [ "rctree", "strict-num", @@ -3760,9 +3868,9 @@ checksum = "711b9620af191e0cdc7468a8d14e709c3dcdb115b36f838e601583af800a370a" [[package]] name = "uuid" -version = "1.4.1" +version = "1.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "79daa5ed5740825c40b389c5e50312b9c86df53fccd33f281df655642b43869d" +checksum = "88ad59a7560b41a70d191093a945f0b87bc1deeda46fb237479708a1d6b6cdfc" [[package]] name = "varint-rs" @@ -3800,9 +3908,9 @@ checksum = "9c8d87e72b64a3b4db28d11ce29237c246188f4f51057d65a7eab63b7987e423" [[package]] name = "wasm-bindgen" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7706a72ab36d8cb1f80ffbf0e071533974a60d0a308d01a5d0375bf60499a342" +checksum = "7daec296f25a1bae309c0cd5c29c4b260e510e6d813c286b19eaadf409d40fce" dependencies = [ "cfg-if", "wasm-bindgen-macro", @@ -3810,24 +3918,24 @@ dependencies = [ [[package]] name = "wasm-bindgen-backend" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5ef2b6d3c510e9625e5fe6f509ab07d66a760f0885d858736483c32ed7809abd" +checksum = "e397f4664c0e4e428e8313a469aaa58310d302159845980fd23b0f22a847f217" dependencies = [ "bumpalo", "log", "once_cell", "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", "wasm-bindgen-shared", 
] [[package]] name = "wasm-bindgen-macro" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dee495e55982a3bd48105a7b947fd2a9b4a8ae3010041b9e0faab3f9cd028f1d" +checksum = "5961017b3b08ad5f3fe39f1e79877f8ee7c23c5e5fd5eb80de95abc41f1f16b2" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -3835,48 +3943,51 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro-support" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" +checksum = "c5353b8dab669f5e10f5bd76df26a9360c748f054f862ff5f3f8aae0c7fb3907" dependencies = [ "proc-macro2", "quote", - "syn 2.0.37", + "syn 2.0.39", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.87" +version = "0.2.88" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca6ad05a4870b2bf5fe995117d3728437bd27d7cd5f06f13c17443ef369775a1" +checksum = "0d046c5d029ba91a1ed14da14dca44b68bf2f124cfbaf741c54151fdb3e0750b" [[package]] name = "web-sys" -version = "0.3.64" +version = "0.3.65" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b85cbef8c220a6abc02aefd892dfc0fc23afb1c6a426316ec33253a3877249b" +checksum = "5db499c5f66323272151db0e666cd34f78617522fb0c1604d31a27c50c206a85" dependencies = [ "js-sys", "wasm-bindgen", ] -[[package]] -name = "webpki-roots" -version = "0.24.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b291546d5d9d1eab74f069c77749f2cb8504a12caa20f0f2de93ddbf6f411888" -dependencies = [ - "rustls-webpki", -] - [[package]] name = "weezl" version = "0.1.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9193164d4de03a926d909d3bc7c30543cecb35400c02114792c2cae20d5e2dbb" +[[package]] +name = "which" +version = "4.4.2" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "87ba24419a2078cd2b0f2ede2691b6c66d8e47836da3b6db8265ebad47afbfc7" +dependencies = [ + "either", + "home", + "once_cell", + "rustix", +] + [[package]] name = "whoami" version = "1.4.1" @@ -3918,13 +4029,37 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" +[[package]] +name = "windows-sys" +version = "0.45.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75283be5efb2831d37ea142365f009c02ec203cd29a3ebecbc093d52315b66d0" +dependencies = [ + "windows-targets 0.42.2", +] + [[package]] name = "windows-sys" version = "0.48.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "677d2418bec65e3338edb076e806bc1ec15693c5d0104683f2efe857f61056a9" dependencies = [ - "windows-targets", + "windows-targets 0.48.5", +] + +[[package]] +name = "windows-targets" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8e5180c00cd44c9b1c88adb3693291f1cd93605ded80c250a75d472756b4d071" +dependencies = [ + "windows_aarch64_gnullvm 0.42.2", + "windows_aarch64_msvc 0.42.2", + "windows_i686_gnu 0.42.2", + "windows_i686_msvc 0.42.2", + "windows_x86_64_gnu 0.42.2", + "windows_x86_64_gnullvm 0.42.2", + "windows_x86_64_msvc 0.42.2", ] [[package]] @@ -3933,51 +4068,93 @@ version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9a2fa6e2155d7247be68c096456083145c183cbbbc2764150dda45a87197940c" dependencies = [ - "windows_aarch64_gnullvm", - "windows_aarch64_msvc", - "windows_i686_gnu", - "windows_i686_msvc", - "windows_x86_64_gnu", - "windows_x86_64_gnullvm", - "windows_x86_64_msvc", + "windows_aarch64_gnullvm 0.48.5", + "windows_aarch64_msvc 0.48.5", + "windows_i686_gnu 0.48.5", + "windows_i686_msvc 0.48.5", + "windows_x86_64_gnu 0.48.5", + "windows_x86_64_gnullvm 0.48.5", + 
"windows_x86_64_msvc 0.48.5", ] +[[package]] +name = "windows_aarch64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "597a5118570b68bc08d8d59125332c54f1ba9d9adeedeef5b99b02ba2b0698f8" + [[package]] name = "windows_aarch64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2b38e32f0abccf9987a4e3079dfb67dcd799fb61361e53e2882c3cbaf0d905d8" +[[package]] +name = "windows_aarch64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e08e8864a60f06ef0d0ff4ba04124db8b0fb3be5776a5cd47641e942e58c4d43" + [[package]] name = "windows_aarch64_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "dc35310971f3b2dbbf3f0690a219f40e2d9afcf64f9ab7cc1be722937c26b4bc" +[[package]] +name = "windows_i686_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c61d927d8da41da96a81f029489353e68739737d3beca43145c8afec9a31a84f" + [[package]] name = "windows_i686_gnu" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a75915e7def60c94dcef72200b9a8e58e5091744960da64ec734a6c6e9b3743e" +[[package]] +name = "windows_i686_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "44d840b6ec649f480a41c8d80f9c65108b92d89345dd94027bfe06ac444d1060" + [[package]] name = "windows_i686_msvc" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8f55c233f70c4b27f66c523580f78f1004e8b5a8b659e05a4eb49d4166cca406" +[[package]] +name = "windows_x86_64_gnu" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8de912b8b8feb55c064867cf047dda097f92d51efad5b491dfb98f6bbb70cb36" + [[package]] name = "windows_x86_64_gnu" version = "0.48.5" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "53d40abd2583d23e4718fddf1ebec84dbff8381c07cae67ff7768bbf19c6718e" +[[package]] +name = "windows_x86_64_gnullvm" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "26d41b46a36d453748aedef1486d5c7a85db22e56aff34643984ea85514e94a3" + [[package]] name = "windows_x86_64_gnullvm" version = "0.48.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b7b52767868a23d5bab768e390dc5f5c55825b6d30b86c844ff2dc7414044cc" +[[package]] +name = "windows_x86_64_msvc" +version = "0.42.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9aec5da331524158c6d1a4ac0ab1541149c0b9505fde06423b02f5ef0106b9f0" + [[package]] name = "windows_x86_64_msvc" version = "0.48.5" @@ -3995,9 +4172,9 @@ dependencies = [ [[package]] name = "xmlparser" -version = "0.13.5" +version = "0.13.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4d25c75bf9ea12c4040a97f829154768bbbce366287e2dc044af160cd79a13fd" +checksum = "66fee0b777b0f5ac1c69bb06d361268faafa61cd4682ae064a171c16c433e9e4" [[package]] name = "xmlwriter" @@ -4005,6 +4182,41 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ec7a2a501ed189703dba8b08142f057e887dfc4b2cc4db2d343ac6376ba3e0b9" +[[package]] +name = "yaml-rust" +version = "0.4.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "56c1936c4cc7a1c9ab21a1ebb602eb942ba868cbd44a99cb7cdc5892335e1c85" +dependencies = [ + "linked-hash-map", +] + +[[package]] +name = "yansi" +version = "0.5.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" + +[[package]] +name = "zerocopy" +version = "0.7.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"8cd369a67c0edfef15010f980c3cbe45d7f651deac2cd67ce097cd801de16557" +dependencies = [ + "zerocopy-derive", +] + +[[package]] +name = "zerocopy-derive" +version = "0.7.25" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c2f140bda219a26ccc0cdb03dba58af72590c53b22642577d88a927bc5c87d6b" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.39", +] + [[package]] name = "zeroize" version = "1.6.0" @@ -4029,7 +4241,16 @@ version = "0.12.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1a27595e173641171fc74a1232b7b1c7a7cb6e18222c11e9dfb9888fa424c53c" dependencies = [ - "zstd-safe", + "zstd-safe 6.0.6", +] + +[[package]] +name = "zstd" +version = "0.13.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "bffb3309596d527cfcba7dfc6ed6052f1d39dfbd7c867aa2e865e4a449c10110" +dependencies = [ + "zstd-safe 7.0.0", ] [[package]] @@ -4042,13 +4263,21 @@ dependencies = [ "zstd-sys", ] +[[package]] +name = "zstd-safe" +version = "7.0.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "43747c7422e2924c11144d5229878b98180ef8b06cca4ab5af37afc8a8d8ea3e" +dependencies = [ + "zstd-sys", +] + [[package]] name = "zstd-sys" -version = "2.0.8+zstd.1.5.5" +version = "2.0.9+zstd.1.5.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5556e6ee25d32df2586c098bbfa278803692a20d0ab9565e049480d52707ec8c" +checksum = "9e16efa8a874a0481a574084d34cc26fdb3b99627480f785888deb6386506656" dependencies = [ "cc", - "libc", "pkg-config", ] diff --git a/Cargo.toml b/Cargo.toml index acc78dcba..1bd28c932 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [workspace] resolver = "2" -members = ["martin", "martin-tile-utils", "martin-mbtiles"] +members = ["martin", "martin-tile-utils", "mbtiles"] [workspace.package] edition = "2021" @@ -11,48 +11,61 @@ readme = "README.md" homepage = "https://martin.maplibre.org/" [workspace.dependencies] -actix = "0.13" 
actix-cors = "0.6" actix-http = "3" actix-rt = "2" actix-web = "4" anyhow = "1.0" async-trait = "0.1" +bit-set = "0.5.3" brotli = "3" cargo-husky = { version = "1", features = ["user-hooks"], default-features = false } clap = { version = "4", features = ["derive"] } criterion = { version = "0.5", features = ["async_futures", "async_tokio", "html_reports"] } ctor = "0.2" deadpool-postgres = "0.11" +enum-display = "0.1" env_logger = "0.10" flate2 = "1" futures = "0.3" indoc = "2" +insta = "1" itertools = "0.11" -json-patch = "1.1" +json-patch = "1.2" log = "0.4" -martin-mbtiles = { path = "./martin-mbtiles", version = "0.4.0", default-features = false, features = ["native-tls"] } # disable CLI tools martin-tile-utils = { path = "./martin-tile-utils", version = "0.1.0" } +mbtiles = { path = "./mbtiles", version = "0.7.0", default-features = false } num_cpus = "1" -openssl = "0.10" +pbf_font_tools = { version = "2.5.0", features = ["freetype"] } pmtiles = { version = "0.3", features = ["mmap-async-tokio", "tilejson"] } postgis = "0.9" postgres = { version = "0.19", features = ["with-time-0_3", "with-uuid-1", "with-serde_json-1"] } -postgres-openssl = "0.5" postgres-protocol = "0.6" +pretty_assertions = "1" regex = "1" +rstest = "0.18" +rustls = { version = "0.21", features = ["dangerous_configuration"] } +rustls-native-certs = "0.6" +rustls-pemfile = "1" semver = "1" serde = { version = "1", features = ["derive"] } serde_json = "1" serde_yaml = "0.9" -spreet = { version = "0.8", default-features = false } -sqlite-hashes = "0.3" -sqlx = { version = "0.7", features = ["sqlite"] } +spreet = { version = "0.9", default-features = false } +sqlite-hashes = { version = "0.5", default-features = false, features = ["md5", "window", "hex"] } +sqlx = { version = "0.7", features = ["sqlite", "runtime-tokio"] } subst = { version = "0.3", features = ["yaml"] } thiserror = "1" tilejson = "0.3" -tokio = { version = "1.32.0", features = ["macros"] } +tokio = { version = "1.33.0", 
features = ["macros"] } +tokio-postgres-rustls = "0.10" -[profile.dev.package.sqlx-macros] +[profile.dev.package] # See https://github.com/launchbadge/sqlx#compile-time-verification -opt-level = 3 +sqlx-macros.opt-level = 3 +# See https://docs.rs/insta/latest/insta/#optional-faster-runs +insta.opt-level = 3 +similar.opt-level = 3 + +#[patch.crates-io] +#sqlite-hashes = { path = "/home/nyurik/dev/rust/sqlite-hashes" } diff --git a/Dockerfile b/Dockerfile index 71ffdf5e8..035f84ab6 100755 --- a/Dockerfile +++ b/Dockerfile @@ -3,10 +3,10 @@ FROM rust:alpine as builder WORKDIR /usr/src/martin RUN apk update \ - && apk add --no-cache openssl-dev musl-dev perl build-base + && apk add --no-cache musl-dev perl build-base COPY . . -RUN CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse cargo build --release --features=vendored-openssl +RUN CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse cargo build --release FROM alpine:latest diff --git a/README.md b/README.md index df2442e10..a0055cab6 100755 --- a/README.md +++ b/README.md @@ -1,4 +1,4 @@ -# Martin +[![Martin](https://raw.githubusercontent.com/maplibre/martin/main/logo.png)](https://maplibre.org/martin/) [![Book](https://img.shields.io/badge/docs-Book-informational)](https://maplibre.org/martin) [![docs.rs docs](https://docs.rs/martin/badge.svg)](https://docs.rs/martin) @@ -6,46 +6,48 @@ [![GitHub](https://img.shields.io/badge/github-maplibre/martin-8da0cb?logo=github)](https://github.com/maplibre/martin) [![crates.io version](https://img.shields.io/crates/v/martin.svg)](https://crates.io/crates/martin) [![Security audit](https://github.com/maplibre/martin/workflows/Security%20audit/badge.svg)](https://github.com/maplibre/martin/security) -[![CI build](https://github.com/maplibre/martin/workflows/CI/badge.svg)](https://github.com/maplibre/martin/actions) +[![CI build](https://github.com/maplibre/martin/actions/workflows/ci.yml/badge.svg)](https://github.com/maplibre/martin/actions) 
[![](https://img.shields.io/badge/Slack-%23maplibre--martin-2EB67D?logo=slack)](https://slack.openstreetmap.us/) Martin is a tile server able to generate and serve [vector tiles](https://github.com/mapbox/vector-tile-spec) on the fly from large [PostGIS](https://github.com/postgis/postgis) databases, [PMTile](https://protomaps.com/blog/pmtiles-v3-whats-new), and [MBTile](https://github.com/mapbox/mbtiles-spec) files, allowing multiple tile sources to be dynamically combined into one. Martin optimizes for speed and heavy traffic, and is written in [Rust](https://github.com/rust-lang/rust). See [Martin book](https://maplibre.org/martin/) for complete documentation. -![Martin](https://raw.githubusercontent.com/maplibre/martin/main/logo.png) - ## Installation -_See [installation instructions](https://maplibre.org/martin/installation.html) in the Martin book._ +_See [installation instructions](https://maplibre.org/martin/10-installation.html) in the Martin book._ **Prerequisites:** If using Martin with PostgreSQL database, you must install PostGIS with at least v3.0+, v3.1+ recommended. You can download martin from [GitHub releases page](https://github.com/maplibre/martin/releases). -| Platform | Downloads (latest) | -|----------|------------------------| -| Linux | [64-bit][rl-linux-tar] | -| macOS | [64-bit][rl-macos-tar] | -| Windows | [64-bit][rl-win64-zip] | - -[rl-linux-tar]: https://github.com/maplibre/martin/releases/latest/download/martin-Linux-x86_64.tar.gz -[rl-macos-tar]: https://github.com/maplibre/martin/releases/latest/download/martin-Darwin-x86_64.tar.gz +| Platform | AMD-64 | ARM-64 | +|----------|--------------------------------------------------------------------------------------------------|-------------------------------------| +| Linux | [.tar.gz][rl-linux-x64] (gnu)
[.tar.gz][rl-linux-x64-musl] (musl)
[.deb][rl-linux-x64-deb] | [.tar.gz][rl-linux-a64-musl] (musl) | +| macOS | [.tar.gz][rl-macos-x64] | [.tar.gz][rl-macos-a64] | +| Windows | [.zip][rl-win64-zip] | | + +[rl-linux-x64]: https://github.com/maplibre/martin/releases/latest/download/martin-Linux-x86_64.tar.gz +[rl-linux-x64-musl]: https://github.com/maplibre/martin/releases/latest/download/martin-Linux-x86_64-musl.tar.gz +[rl-linux-x64-deb]: https://github.com/maplibre/martin/releases/latest/download/martin-Debian-x86_64.deb +[rl-linux-a64-musl]: https://github.com/maplibre/martin/releases/latest/download/martin-Linux-aarch64-musl.tar.gz +[rl-macos-x64]: https://github.com/maplibre/martin/releases/latest/download/martin-Darwin-x86_64.tar.gz +[rl-macos-a64]: https://github.com/maplibre/martin/releases/latest/download/martin-Darwin-aarch64.tar.gz [rl-win64-zip]: https://github.com/maplibre/martin/releases/latest/download/martin-Windows-x86_64.zip -If you are using macOS and [Homebrew](https://brew.sh/) you can install martin using Homebrew tap. +If you are using macOS and [Homebrew](https://brew.sh/) you can install `martin` and `mbtiles` using Homebrew tap. ```shell -brew tap maplibre/martin https://github.com/maplibre/martin.git -brew install maplibre/martin/martin +brew tap maplibre/martin +brew install martin ``` ## Running Martin Service -_See [running instructions](https://maplibre.org/martin/run.html) in the Martin book._ +_See [running instructions](https://maplibre.org/martin/20-run.html) in the Martin book._ Martin supports any number of PostgreSQL/PostGIS database connections with [geospatial-enabled](https://postgis.net/docs/using_postgis_dbmanagement.html#geometry_columns) tables and tile-producing SQL functions, as well as [PMTile](https://protomaps.com/blog/pmtiles-v3-whats-new) and [MBTile](https://github.com/mapbox/mbtiles-spec) files as tile sources. -Martin can auto-discover tables and functions using a [connection string](https://maplibre.org/martin/PostgreSQL-Connection-String.html). 
A PG connection string can also be passed via the `DATABASE_URL` environment variable. +Martin can auto-discover tables and functions using a [connection string](https://maplibre.org/martin/31-pg-connections.html). A PG connection string can also be passed via the `DATABASE_URL` environment variable. Each tile source will have a [TileJSON](https://github.com/mapbox/tilejson-spec) endpoint. @@ -68,7 +70,7 @@ martin --config config.yaml ``` #### Docker Example -_See [Docker instructions](https://maplibre.org/martin/run-with-docker.html) in the Martin book._ +_See [Docker instructions](https://maplibre.org/martin/22-run-with-docker.html) in the Martin book._ Martin is also available as a [Docker image](https://ghcr.io/maplibre/martin). You could either share a configuration file from the host with the container via the `-v` param, or you can let Martin auto-discover all sources e.g. by passing `DATABASE_URL` or specifying the .mbtiles/.pmtiles files. @@ -82,20 +84,22 @@ docker run -p 3000:3000 \ ``` ## API -_See [API documentation](https://maplibre.org/martin/using.html) in the Martin book._ +_See [API documentation](https://maplibre.org/martin/40-using-endpoints.html) in the Martin book._ Martin data is available via the HTTP `GET` endpoints: -| URL | Description | -|----------------------------------------|-----------------------------------------------| -| `/` | Status text, that will eventually show web UI | -| `/catalog` | List of all sources | -| `/{sourceID}` | Source TileJSON | -| `/{sourceID}/{z}/{x}/{y}` | Map Tiles | -| `/{source1},...,{sourceN}` | Composite Source TileJSON | -| `/{source1},...,{sourceN}/{z}/{x}/{y}` | Composite Source Tiles | -| `/sprite/{spriteID}[@2x].{json,png}` | Sprites (low and high DPI, index/png) | -| `/health` | Martin server health check: returns 200 `OK` | +| URL | Description | +|-----------------------------------------|-----------------------------------------------| +| `/` | Status text, that will eventually show web UI | +| 
`/catalog` | List of all sources | +| `/{sourceID}` | Source TileJSON | +| `/{sourceID}/{z}/{x}/{y}` | Map Tiles | +| `/{source1},…,{sourceN}` | Composite Source TileJSON | +| `/{source1},…,{sourceN}/{z}/{x}/{y}` | Composite Source Tiles | +| `/sprite/{spriteID}[@2x].{json,png}` | Sprites (low and high DPI, index/png) | +| `/font/{font}/{start}-{end}` | Font source | +| `/font/{font1},…,{fontN}/{start}-{end}` | Composite Font source | +| `/health` | Martin server health check: returns 200 `OK` | ## Documentation See [Martin book](https://maplibre.org/martin/) for complete documentation. diff --git a/arm64.Dockerfile b/arm64.Dockerfile index 4912078d2..e15c42712 100644 --- a/arm64.Dockerfile +++ b/arm64.Dockerfile @@ -4,12 +4,11 @@ WORKDIR /usr/src/martin RUN apt-get update \ && apt-get install -y --no-install-recommends \ - libssl-dev \ perl \ && rm -rf /var/lib/apt/lists/* COPY . . -RUN CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse cargo build --release --features=vendored-openssl +RUN CARGO_REGISTRIES_CRATES_IO_PROTOCOL=sparse cargo build --release FROM debian:bullseye-slim diff --git a/debian/config.yaml b/debian/config.yaml index aacb3b20a..f59118cbf 100644 --- a/debian/config.yaml +++ b/debian/config.yaml @@ -7,14 +7,14 @@ listen_addresses: '0.0.0.0:3000' # Number of web server workers worker_processes: 8 -# see https://maplibre.org/martin/config-file.html +# see https://maplibre.org/martin/30-config-file.html # postgres: # connection_string: 'postgresql://postgres@localhost:5432/db' # default_srid: 4326 # pool_size: 20 # max_feature_count: 1000 -# disable_bounds: false +# auto_bounds: skip # pmtiles: # paths: @@ -29,3 +29,7 @@ worker_processes: 8 # - /path/to/mbtiles.mbtiles # sources: # mb-src1: /path/to/mbtiles1.mbtiles + +# fonts: +# - /path/to/font/file.ttf +# - /path/to/font_dir diff --git a/demo/frontend/src/Components/Map/Filters/DayPicker.ts b/demo/frontend/src/Components/Map/Filters/DayPicker.ts index 655b00a00..e5abda1b3 100644 --- 
a/demo/frontend/src/Components/Map/Filters/DayPicker.ts +++ b/demo/frontend/src/Components/Map/Filters/DayPicker.ts @@ -11,7 +11,7 @@ export default styled.div` } .DayPicker-Caption > div { - font-weight: bold; + font-weight: bold; color: #dadfee; } diff --git a/demo/frontend/yarn.lock b/demo/frontend/yarn.lock index c3b4ffff1..c79e0e5d1 100644 --- a/demo/frontend/yarn.lock +++ b/demo/frontend/yarn.lock @@ -23,6 +23,14 @@ "@babel/highlight" "^7.22.10" chalk "^2.4.2" +"@babel/code-frame@^7.22.13": + version "7.22.13" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.22.13.tgz#e3c1c099402598483b7a8c46a721d1038803755e" + integrity sha512-XktuhWlJ5g+3TJXc5upd9Ks1HutSArik6jf2eAjYFyIOf4ej3RN+184cZbzDvbPnuTJIUhPKKJE3cIsYTiAT3w== + dependencies: + "@babel/highlight" "^7.22.13" + chalk "^2.4.2" + "@babel/compat-data@^7.22.5", "@babel/compat-data@^7.22.6", "@babel/compat-data@^7.22.9": version "7.22.9" resolved "https://registry.yarnpkg.com/@babel/compat-data/-/compat-data-7.22.9.tgz#71cdb00a1ce3a329ce4cbec3a44f9fef35669730" @@ -68,6 +76,16 @@ "@jridgewell/trace-mapping" "^0.3.17" jsesc "^2.5.1" +"@babel/generator@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.23.0.tgz#df5c386e2218be505b34837acbcb874d7a983420" + integrity sha512-lN85QRR+5IbYrMWM6Y4pE/noaQtg4pNiqeNGX60eqOfo6gtEj6uw/JagelB8vVztSd7R6M5n1+PQkDbHbBRU4g== + dependencies: + "@babel/types" "^7.23.0" + "@jridgewell/gen-mapping" "^0.3.2" + "@jridgewell/trace-mapping" "^0.3.17" + jsesc "^2.5.1" + "@babel/helper-annotate-as-pure@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.22.5.tgz#e7f06737b197d580a01edf75d97e2c8be99d3882" @@ -128,6 +146,11 @@ lodash.debounce "^4.0.8" resolve "^1.14.2" +"@babel/helper-environment-visitor@^7.22.20": + version "7.22.20" + resolved 
"https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.20.tgz#96159db61d34a29dba454c959f5ae4a649ba9167" + integrity sha512-zfedSIzFhat/gFhWfHtgWvlec0nqB9YEIVrpuwjruLlXfUSnA8cJB0miHKwqDnQ7d32aKo2xt88/xZptwxbfhA== + "@babel/helper-environment-visitor@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-environment-visitor/-/helper-environment-visitor-7.22.5.tgz#f06dd41b7c1f44e1f8da6c4055b41ab3a09a7e98" @@ -141,6 +164,14 @@ "@babel/template" "^7.22.5" "@babel/types" "^7.22.5" +"@babel/helper-function-name@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/helper-function-name/-/helper-function-name-7.23.0.tgz#1f9a3cdbd5b2698a670c30d2735f9af95ed52759" + integrity sha512-OErEqsrxjZTJciZ4Oo+eoZqeW9UIiOcuYKRJA4ZAgV9myA+pOXhhmpfNCKjEH/auVfEYVFJ6y1Tc4r0eIApqiw== + dependencies: + "@babel/template" "^7.22.15" + "@babel/types" "^7.23.0" + "@babel/helper-hoist-variables@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-hoist-variables/-/helper-hoist-variables-7.22.5.tgz#c01a007dac05c085914e8fb652b339db50d823bb" @@ -229,6 +260,11 @@ resolved "https://registry.yarnpkg.com/@babel/helper-string-parser/-/helper-string-parser-7.22.5.tgz#533f36457a25814cf1df6488523ad547d784a99f" integrity sha512-mM4COjgZox8U+JcXQwPijIZLElkgEpO5rsERVDJTc2qfCDfERyob6k5WegS14SX18IIjv+XD+GrqNumY5JRCDw== +"@babel/helper-validator-identifier@^7.22.20": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.20.tgz#c4ae002c61d2879e724581d96665583dbc1dc0e0" + integrity sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A== + "@babel/helper-validator-identifier@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.22.5.tgz#9544ef6a33999343c8740fa51350f30eeaaaf193" @@ -266,11 +302,25 @@ chalk 
"^2.4.2" js-tokens "^4.0.0" +"@babel/highlight@^7.22.13": + version "7.22.20" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.22.20.tgz#4ca92b71d80554b01427815e06f2df965b9c1f54" + integrity sha512-dkdMCN3py0+ksCgYmGG8jKeGA/8Tk+gJwSYYlFGxG5lmhfKNoAy004YpLxpS1W2J8m/EK2Ew+yOs9pVRwO89mg== + dependencies: + "@babel/helper-validator-identifier" "^7.22.20" + chalk "^2.4.2" + js-tokens "^4.0.0" + "@babel/parser@^7.22.10", "@babel/parser@^7.22.5": version "7.22.10" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.22.10.tgz#e37634f9a12a1716136c44624ef54283cabd3f55" integrity sha512-lNbdGsQb9ekfsnjFGhEiF4hfFqGgfOP3H3d27re3n+CGhNuTSUEQdfWk556sTLNTloczcdM5TYF2LhzmDQKyvQ== +"@babel/parser@^7.22.15", "@babel/parser@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.23.0.tgz#da950e622420bf96ca0d0f2909cdddac3acd8719" + integrity sha512-vvPKKdMemU85V9WE/l5wZEmImpCtLqbnTvqDS2U1fJ96KrxoW7KrXhNsNCblQlg8Ck4b85yxdTyelsMUgFUXiw== + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.22.5.tgz#87245a21cd69a73b0b81bcda98d443d6df08f05e" @@ -1094,6 +1144,15 @@ dependencies: regenerator-runtime "^0.14.0" +"@babel/template@^7.22.15": + version "7.22.15" + resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.15.tgz#09576efc3830f0430f4548ef971dde1350ef2f38" + integrity sha512-QPErUVm4uyJa60rkI73qneDacvdvzxshT3kksGqlGWYdOTIUOwJ7RDUL8sGqslY1uXWSL6xMFKEXDS3ox2uF0w== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/parser" "^7.22.15" + "@babel/types" "^7.22.15" + "@babel/template@^7.22.5": version "7.22.5" resolved "https://registry.yarnpkg.com/@babel/template/-/template-7.22.5.tgz#0c8c4d944509875849bd0344ff0050756eefc6ec" @@ -1104,18 +1163,18 @@ 
"@babel/types" "^7.22.5" "@babel/traverse@^7.22.10", "@babel/traverse@^7.4.5": - version "7.22.10" - resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.22.10.tgz#20252acb240e746d27c2e82b4484f199cf8141aa" - integrity sha512-Q/urqV4pRByiNNpb/f5OSv28ZlGJiFiiTh+GAHktbIrkPhPbl90+uW6SmpoLyZqutrg9AEaEf3Q/ZBRHBXgxig== - dependencies: - "@babel/code-frame" "^7.22.10" - "@babel/generator" "^7.22.10" - "@babel/helper-environment-visitor" "^7.22.5" - "@babel/helper-function-name" "^7.22.5" + version "7.23.2" + resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.23.2.tgz#329c7a06735e144a506bdb2cad0268b7f46f4ad8" + integrity sha512-azpe59SQ48qG6nu2CzcMLbxUudtN+dOM9kDbUqGq3HXUJRlo7i8fvPoxQUzYgLZ4cMVmuZgm8vvBpNeRhd6XSw== + dependencies: + "@babel/code-frame" "^7.22.13" + "@babel/generator" "^7.23.0" + "@babel/helper-environment-visitor" "^7.22.20" + "@babel/helper-function-name" "^7.23.0" "@babel/helper-hoist-variables" "^7.22.5" "@babel/helper-split-export-declaration" "^7.22.6" - "@babel/parser" "^7.22.10" - "@babel/types" "^7.22.10" + "@babel/parser" "^7.23.0" + "@babel/types" "^7.23.0" debug "^4.1.0" globals "^11.1.0" @@ -1128,6 +1187,15 @@ "@babel/helper-validator-identifier" "^7.22.5" to-fast-properties "^2.0.0" +"@babel/types@^7.22.15", "@babel/types@^7.23.0": + version "7.23.0" + resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.23.0.tgz#8c1f020c9df0e737e4e247c0619f58c68458aaeb" + integrity sha512-0oIyUfKoI3mSqMvsxBdclDwxXKXAUA8v/apZbc+iSyARYou1o8ZGDxbUYyLFoW2arqS2jDGqJuZvv1d/io1axg== + dependencies: + "@babel/helper-string-parser" "^7.22.5" + "@babel/helper-validator-identifier" "^7.22.20" + to-fast-properties "^2.0.0" + "@emotion/is-prop-valid@^1.1.0": version "1.2.1" resolved "https://registry.yarnpkg.com/@emotion/is-prop-valid/-/is-prop-valid-1.2.1.tgz#23116cf1ed18bfeac910ec6436561ecb1a3885cc" diff --git a/docker-compose.yml b/docker-compose.yml index 1bab0390d..04754d296 100644 --- a/docker-compose.yml +++ 
b/docker-compose.yml @@ -72,6 +72,35 @@ services: - PGPASSWORD=postgres volumes: - ./tests/fixtures:/fixtures + - ./tests/fixtures/initdb-dc-ssl.sh:/docker-entrypoint-initdb.d/10_martin.sh + - ./tests/fixtures/initdb-dc.sh:/docker-entrypoint-initdb.d/20_martin.sh + + db-ssl-cert: + # This should match the version of postgres used in the CI workflow + image: postgis/postgis:15-3.3 + command: + - "postgres" + - "-c" + - "ssl=on" + - "-c" + - "ssl_cert_file=/etc/ssl/certs/ssl-cert-snakeoil.pem" + - "-c" + - "ssl_key_file=/etc/ssl/private/ssl-cert-snakeoil.key" + restart: unless-stopped + ports: + - "${PGPORT:-5411}:5432" + environment: + # POSTGRES_* variables are used by the postgis/postgres image + # PG_* variables are used by psql + - POSTGRES_DB=db + - POSTGRES_USER=postgres + - POSTGRES_PASSWORD=postgres + - PGDATABASE=db + - PGUSER=postgres + - PGPASSWORD=postgres + volumes: + - ./tests/fixtures:/fixtures + - ./tests/fixtures/initdb-dc-ssl-cert.sh:/docker-entrypoint-initdb.d/10_martin.sh - ./tests/fixtures/initdb-dc.sh:/docker-entrypoint-initdb.d/20_martin.sh db-legacy: diff --git a/docs/src/introduction.md b/docs/src/00-introduction.md similarity index 90% rename from docs/src/introduction.md rename to docs/src/00-introduction.md index c9c0b95e5..975889105 100644 --- a/docs/src/introduction.md +++ b/docs/src/00-introduction.md @@ -12,4 +12,4 @@ See also [Martin demo site](https://martin.maplibre.org/) [![GitHub](https://img.shields.io/badge/github-maplibre/martin-8da0cb?logo=github)](https://github.com/maplibre/martin) [![crates.io version](https://img.shields.io/crates/v/martin.svg)](https://crates.io/crates/martin) [![Security audit](https://github.com/maplibre/martin/workflows/Security%20audit/badge.svg)](https://github.com/maplibre/martin/security) -[![CI build](https://github.com/maplibre/martin/workflows/CI/badge.svg)](https://github.com/maplibre/martin/actions) +[![CI 
build](https://github.com/maplibre/martin/actions/workflows/ci.yml/badge.svg)](https://github.com/maplibre/martin/actions) diff --git a/docs/src/installation.md b/docs/src/10-installation.md similarity index 77% rename from docs/src/installation.md rename to docs/src/10-installation.md index f1bf68969..114cf3807 100644 --- a/docs/src/installation.md +++ b/docs/src/10-installation.md @@ -1,8 +1,8 @@ -## Prerequisites +### Prerequisites If using Martin with PostgreSQL database, you must install PostGIS with at least v3.0+, v3.1+ recommended. -## Binary Distributions +### Binary Distributions You can download martin from [GitHub releases page](https://github.com/maplibre/martin/releases). @@ -16,7 +16,7 @@ You can download martin from [GitHub releases page](https://github.com/maplibre/ [rl-macos-tar]: https://github.com/maplibre/martin/releases/latest/download/martin-Darwin-x86_64.tar.gz [rl-win64-zip]: https://github.com/maplibre/martin/releases/latest/download/martin-Windows-x86_64.zip -# Building with Cargo +### Building with Cargo If you [install Rust](https://www.rust-lang.org/tools/install), you can build martin from source with Cargo: @@ -25,18 +25,16 @@ cargo install martin martin --help ``` -If your PostgreSQL connection requires SSL, you would need to install OpenSSL and run `cargo install martin --features ssl`, or even install with `--features vendored-openssl` to [statically link OpenSSL](https://docs.rs/openssl/latest/openssl/#vendored) into the binary. - -## Homebrew +### Homebrew If you are using macOS and [Homebrew](https://brew.sh/) you can install martin using Homebrew tap. ```shell -brew tap maplibre/martin https://github.com/maplibre/martin.git -brew install maplibre/martin/martin +brew tap maplibre/martin +brew install martin ``` -## Docker +### Docker Martin is also available as a [Docker image](https://ghcr.io/maplibre/martin). 
You could either share a configuration file from the host with the container via the `-v` param, or you can let Martin auto-discover all sources e.g. by passing `DATABASE_URL` or specifying the .mbtiles/.pmtiles files. diff --git a/docs/src/run.md b/docs/src/20-run.md similarity index 52% rename from docs/src/run.md rename to docs/src/20-run.md index 4007b4cd0..0c1f3c45e 100644 --- a/docs/src/run.md +++ b/docs/src/20-run.md @@ -1,6 +1,6 @@ # Usage -Martin requires at least one PostgreSQL [connection string](pg-connections.md) or a [tile source file](sources-files.md) as a command-line argument. A PG connection string can also be passed via the `DATABASE_URL` environment variable. +Martin requires at least one PostgreSQL [connection string](31-pg-connections.md) or a [tile source file](34-sources-files.md) as a command-line argument. A PG connection string can also be passed via the `DATABASE_URL` environment variable. ```shell martin postgresql://postgres@localhost/db diff --git a/docs/src/env-vars.md b/docs/src/21-env-vars.md similarity index 93% rename from docs/src/env-vars.md rename to docs/src/21-env-vars.md index 54c686439..c55a65209 100644 --- a/docs/src/env-vars.md +++ b/docs/src/21-env-vars.md @@ -1,6 +1,6 @@ -# Environment Variables +## Environment Variables -You can also configure Martin using environment variables, but only if the configuration file is not used. See [configuration section](config-file.md) on how to use environment variables with config files. See also [SSL configuration](pg-connections.md#postgresql-ssl-connections) section below. +You can also configure Martin using environment variables, but only if the configuration file is not used. See [configuration section](30-config-file.md) on how to use environment variables with config files. See also [SSL configuration](31-pg-connections.md#postgresql-ssl-connections) section below. | Environment var
Config File key | Example | Description | |------------------------------------------|--------------------------------------|--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| diff --git a/docs/src/run-with-cli.md b/docs/src/21-run-with-cli.md similarity index 64% rename from docs/src/run-with-cli.md rename to docs/src/21-run-with-cli.md index d8c235654..c7c48487a 100644 --- a/docs/src/run-with-cli.md +++ b/docs/src/21-run-with-cli.md @@ -1,4 +1,4 @@ -# Command-line Interface +## Command-line Interface You can configure Martin using command-line interface. See `martin --help` or `cargo run -- --help` for more information. @@ -6,33 +6,54 @@ You can configure Martin using command-line interface. See `martin --help` or `c Usage: martin [OPTIONS] [CONNECTION]... Arguments: - [CONNECTION]... Connection strings, e.g. postgres://... or /path/to/files + [CONNECTION]... + Connection strings, e.g. postgres://... or /path/to/files Options: -c, --config Path to config file. If set, no tile source-related parameters are allowed + --save-config Save resulting config to a file or use "-" to print to stdout. By default, only print if sources are auto-detected + -s, --sprite Export a directory with SVG files as a sprite source. Can be specified multiple times + + -f, --font + Export a font file or a directory with font files as a font source (recursive). Can be specified multiple times + -k, --keep-alive Connection keep alive timeout. [DEFAULT: 75] + -l, --listen-addresses The socket address to bind. 
[DEFAULT: 0.0.0.0:3000] + -W, --workers Number of web server workers - -b, --disable-bounds - Disable the automatic generation of bounds for spatial PG tables + + -b, --auto-bounds + Specify how bounds should be computed for the spatial PG tables. [DEFAULT: quick] + + Possible values: + - quick: Compute table geometry bounds, but abort if it takes longer than 5 seconds + - calc: Compute table geometry bounds. The startup time may be significant. Make sure all GEO columns have indexes + - skip: Skip bounds calculation. The bounds will be set to the whole world + --ca-root-file Loads trusted root certificates from a file. The file should contain a sequence of PEM-formatted CA certificates + -d, --default-srid If a spatial PG table has SRID 0, then this default SRID will be used as a fallback + -p, --pool-size Maximum connections pool size [DEFAULT: 20] + -m, --max-feature-count Limit the number of features in a tile from a PG table source + -h, --help - Print help + Print help (see a summary with '-h') + -V, --version Print version ``` diff --git a/docs/src/run-with-docker.md b/docs/src/22-run-with-docker.md similarity index 98% rename from docs/src/run-with-docker.md rename to docs/src/22-run-with-docker.md index 08ffa8854..5b60bf445 100644 --- a/docs/src/run-with-docker.md +++ b/docs/src/22-run-with-docker.md @@ -1,4 +1,4 @@ -# Running with Docker +## Running with Docker You can use official Docker image [`ghcr.io/maplibre/martin`](https://ghcr.io/maplibre/martin) diff --git a/docs/src/run-with-docker-compose.md b/docs/src/23-run-with-docker-compose.md similarity index 96% rename from docs/src/run-with-docker-compose.md rename to docs/src/23-run-with-docker-compose.md index 0f4f04ad6..85873eae8 100644 --- a/docs/src/run-with-docker-compose.md +++ b/docs/src/23-run-with-docker-compose.md @@ -1,4 +1,4 @@ -# Running with Docker Compose +## Running with Docker Compose You can use example 
[`docker-compose.yml`](https://raw.githubusercontent.com/maplibre/martin/main/docker-compose.yml) file as a reference diff --git a/docs/src/run-with-nginx.md b/docs/src/24-run-with-nginx.md similarity index 96% rename from docs/src/run-with-nginx.md rename to docs/src/24-run-with-nginx.md index f3e6a0a02..6230f5e04 100644 --- a/docs/src/run-with-nginx.md +++ b/docs/src/24-run-with-nginx.md @@ -1,4 +1,4 @@ -# Using with NGINX +## Using with NGINX You can run Martin behind NGINX proxy, so you can cache frequently accessed tiles and reduce unnecessary pressure on the database. Here is an example `docker-compose.yml` file that runs Martin with NGINX and PostgreSQL. @@ -38,9 +38,9 @@ services: You can find an example NGINX configuration file [here](https://github.com/maplibre/martin/blob/main/demo/frontend/nginx.conf). -## Rewriting URLs +### Rewriting URLs -If you are running Martin behind NGINX proxy, you may want to rewrite the request URL to properly handle tile URLs in [TileJSON](using.md#source-tilejson). +If you are running Martin behind NGINX proxy, you may want to rewrite the request URL to properly handle tile URLs in [TileJSON](40-using-endpoints.md#source-tilejson). ```nginx location ~ /tiles/(?.*) { @@ -53,7 +53,7 @@ location ~ /tiles/(?.*) { } ``` -## Caching tiles +### Caching tiles You can also use NGINX to cache tiles. In the example, the maximum cache size is set to 10GB, and caching time is set to 1 hour for responses with codes 200, 204, and 302 and 1 minute for responses with code 404. diff --git a/docs/src/troubleshooting.md b/docs/src/25-troubleshooting.md similarity index 97% rename from docs/src/troubleshooting.md rename to docs/src/25-troubleshooting.md index 489c37f6c..0c46f4478 100644 --- a/docs/src/troubleshooting.md +++ b/docs/src/25-troubleshooting.md @@ -1,4 +1,4 @@ -# Troubleshooting +## Troubleshooting Log levels are controlled on a per-module basis, and by default all logging is disabled except for errors. 
Logging is controlled via the `RUST_LOG` environment variable. The value of this environment variable is a comma-separated list of logging directives. diff --git a/docs/src/config-file.md b/docs/src/30-config-file.md similarity index 94% rename from docs/src/config-file.md rename to docs/src/30-config-file.md index 8fe58a638..a4df42845 100644 --- a/docs/src/config-file.md +++ b/docs/src/30-config-file.md @@ -47,9 +47,11 @@ postgres: # Limit the number of table geo features included in a tile. Unlimited by default. max_feature_count: 1000 - # Control the automatic generation of bounds for spatial tables [default: false] - # If enabled, it will spend some time on startup to compute geometry bounds. - disable_bounds: false + # Control the automatic generation of bounds for spatial tables [default: quick] + # 'calc' - compute table geometry bounds on startup. + # 'quick' - same as 'calc', but the calculation will be aborted if it takes more than 5 seconds. + # 'skip' - do not compute table geometry bounds on startup. + auto_bounds: skip # Enable automatic discovery of tables and functions. # You may set this to `false` to disable. @@ -181,4 +183,10 @@ sprites: sources: # SVG images in this directory will be published as a "my_sprites" sprite source my_sprites: /path/to/some_dir + +# Font configuration +fonts: + # A list of *.otf, *.ttf, and *.ttc font files and dirs to search recursively. 
+ - /path/to/font/file.ttf + - /path/to/font_dir ``` diff --git a/docs/src/pg-connections.md b/docs/src/31-pg-connections.md similarity index 88% rename from docs/src/pg-connections.md rename to docs/src/31-pg-connections.md index d887e4cfd..605cd560e 100644 --- a/docs/src/pg-connections.md +++ b/docs/src/31-pg-connections.md @@ -1,9 +1,9 @@ -# PostgreSQL Connection String +## PostgreSQL Connection String Martin supports many of the PostgreSQL connection string settings such as `host`, `port`, `user`, `password`, `dbname`, `sslmode`, `connect_timeout`, `keepalives`, `keepalives_idle`, etc. See the [PostgreSQL docs](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNSTRING) for more details. -## PostgreSQL SSL Connections +### PostgreSQL SSL Connections -Martin supports PostgreSQL `sslmode` including `disable`, `prefer`, `require`, `verify-ca` and `verify-full` modes as described in the [PostgreSQL docs](https://www.postgresql.org/docs/current/libpq-ssl.html). Certificates can be provided in the configuration file, or can be set using the same env vars as used for `psql`. When set as env vars, they apply to all PostgreSQL connections. See [environment vars](env-vars.md) section for more details. +Martin supports PostgreSQL `sslmode` including `disable`, `prefer`, `require`, `verify-ca` and `verify-full` modes as described in the [PostgreSQL docs](https://www.postgresql.org/docs/current/libpq-ssl.html). Certificates can be provided in the configuration file, or can be set using the same env vars as used for `psql`. When set as env vars, they apply to all PostgreSQL connections. See [environment vars](21-env-vars.md) section for more details. By default, `sslmode` is set to `prefer` which means that SSL is used if the server supports it, but the connection is not aborted if the server does not support it. This is the default behavior of `psql` and is the most compatible option. Use the `sslmode` param to set a different `sslmode`, e.g. 
`postgresql://user:password@host/db?sslmode=require`. diff --git a/docs/src/sources-pg-tables.md b/docs/src/32-sources-pg-tables.md similarity index 79% rename from docs/src/sources-pg-tables.md rename to docs/src/32-sources-pg-tables.md index 928b49f0e..6514acc1d 100644 --- a/docs/src/sources-pg-tables.md +++ b/docs/src/32-sources-pg-tables.md @@ -1,8 +1,8 @@ -# Table Sources +## Table Sources -Table Source is a database table which can be used to query [vector tiles](https://github.com/mapbox/vector-tile-spec). If a [PostgreSQL connection string](pg-connections.md) is given, Martin will publish all tables as data sources if they have at least one geometry column. If geometry column SRID is 0, a default SRID must be set, or else that geo-column/table will be ignored. All non-geometry table columns will be published as vector tile feature tags (properties). +Table Source is a database table which can be used to query [vector tiles](https://github.com/mapbox/vector-tile-spec). If a [PostgreSQL connection string](31-pg-connections.md) is given, Martin will publish all tables as data sources if they have at least one geometry column. If geometry column SRID is 0, a default SRID must be set, or else that geo-column/table will be ignored. All non-geometry table columns will be published as vector tile feature tags (properties). -# Modifying Tilejson +### Modifying Tilejson Martin will automatically generate a `TileJSON` manifest for each table source. It will contain the `name`, `description`, `minzoom`, `maxzoom`, `bounds` and `vector_layer` information. For example, if there is a table `public.table_source`: @@ -39,9 +39,9 @@ The TileJSON: } ``` -By default the `description` and `name` is database identifies about this table, and the bounds is queried from database. You can fine tune these by adjusting `auto_publish` section in [configuration file](https://maplibre.org/martin/config-file.html#config-example). 
+By default the `description` and `name` are database identifiers for this table, and the bounds are queried from the database. You can fine tune these by adjusting `auto_publish` section in [configuration file](https://maplibre.org/martin/30-config-file.html#config-example). -## TileJSON in SQL Comments +#### TileJSON in SQL Comments Other than adjusting `auto_publish` section in configuration file, you can fine tune the `TileJSON` on the database side directly: Add a valid JSON as an SQL comment on the table. diff --git a/docs/src/sources-pg-functions.md b/docs/src/33-sources-pg-functions.md similarity index 97% rename from docs/src/sources-pg-functions.md rename to docs/src/33-sources-pg-functions.md index 7f8906c9c..b18c83862 100644 --- a/docs/src/sources-pg-functions.md +++ b/docs/src/33-sources-pg-functions.md @@ -1,4 +1,4 @@ -# PostgreSQL Function Sources +## PostgreSQL Function Sources Function Source is a database function which can be used to query [vector tiles](https://github.com/mapbox/vector-tile-spec). When started, Martin will look for the functions with a suitable signature. A function that takes `z integer` (or `zoom integer`), `x integer`, `y integer`, and an optional `query json` and returns `bytea`, can be used as a Function Source. Alternatively the function could return a record with a single `bytea` field, or a record with two fields of types `bytea` and `text`, where the `text` field is an etag key (i.e. md5 hash).
@@ -9,7 +9,7 @@ Function Source is a database function which can be used to query [vector tiles] | y | integer | Tile y parameter | | query (optional, any name) | json | Query string parameters | -## Simple Function +### Simple Function For example, if you have a table `table_source` in WGS84 (`4326` SRID), then you can use this function as a Function Source: ```sql, ignore @@ -34,7 +34,7 @@ END $$ LANGUAGE plpgsql IMMUTABLE STRICT PARALLEL SAFE; ``` -## Function with Query Parameters +### Function with Query Parameters Users may add a `query` parameter to pass additional parameters to the function. _**TODO**: Modify this example to actually use the query parameters._ @@ -97,7 +97,7 @@ You can access this params using [json operators](https://www.postgresql.org/doc ...WHERE answer = (query_params->'objectParam'->>'answer')::int; ``` -## Modifying TileJSON +### Modifying TileJSON Martin will automatically generate a basic [TileJSON](https://github.com/mapbox/tilejson-spec) manifest for each function source that will contain the name and description of the function, plus optionally `minzoom`, `maxzoom`, and `bounds` (if they were specified via one of the configuration methods). For example, if there is a function `public.function_zxy_query_jsonb`, the default `TileJSON` might look like this (note that URL will be automatically adjusted to match the request host): @@ -112,7 +112,7 @@ Martin will automatically generate a basic [TileJSON](https://github.com/mapbox/ } ``` -### TileJSON in SQL Comments +#### TileJSON in SQL Comments To modify automatically generated `TileJSON`, you can add a valid JSON as an SQL comment on the function. Martin will merge function comment into the generated `TileJSON` using [JSON Merge patch](https://www.rfc-editor.org/rfc/rfc7386). The following example adds `attribution` and `version` fields to the `TileJSON`. 
diff --git a/docs/src/sources-files.md b/docs/src/34-sources-files.md similarity index 63% rename from docs/src/sources-files.md rename to docs/src/34-sources-files.md index 8f80ba46b..85476f5bc 100644 --- a/docs/src/sources-files.md +++ b/docs/src/34-sources-files.md @@ -1,4 +1,4 @@ -# MBTiles and PMTiles File Sources +## MBTiles and PMTiles File Sources Martin can serve any type of tiles from [PMTile](https://protomaps.com/blog/pmtiles-v3-whats-new) and [MBTile](https://github.com/mapbox/mbtiles-spec) files. To serve a file from CLI, simply put the path to the file or the directory with `*.mbtiles` or `*.pmtiles` files. For example: @@ -6,4 +6,4 @@ Martin can serve any type of tiles from [PMTile](https://protomaps.com/blog/pmti martin /path/to/mbtiles/file.mbtiles /path/to/directory ``` -You may also want to generate a [config file](config-file.md) using the `--save-config my-config.yaml`, and later edit it and use it with `--config my-config.yaml` option. +You may also want to generate a [config file](30-config-file.md) using the `--save-config my-config.yaml`, and later edit it and use it with `--config my-config.yaml` option. diff --git a/docs/src/sources-composite.md b/docs/src/35-sources-composite.md similarity index 97% rename from docs/src/sources-composite.md rename to docs/src/35-sources-composite.md index e580ab9f3..255b5caf0 100644 --- a/docs/src/sources-composite.md +++ b/docs/src/35-sources-composite.md @@ -1,4 +1,4 @@ -# Composite Sources +## Composite Sources Composite Sources allows combining multiple sources into one. 
Composite Source consists of multiple sources separated by comma `{source1},...,{sourceN}` diff --git a/docs/src/sources-sprites.md b/docs/src/36-sources-sprites.md similarity index 90% rename from docs/src/sources-sprites.md rename to docs/src/36-sources-sprites.md index 9925360c0..4cd713996 100644 --- a/docs/src/sources-sprites.md +++ b/docs/src/36-sources-sprites.md @@ -1,6 +1,6 @@ -# Sprite Sources +## Sprite Sources -Given a directory with SVG images, Martin will generate a sprite -- a JSON index and a PNG image, for both low and high resolution displays. The SVG filenames without extension will be used as the sprite image IDs. The images are searched recursively in the given directory, so subdirectory names will be used as prefixes for the image IDs, e.g. `icons/bicycle.svg` will be available as `icons/bicycle` sprite image. +Given a directory with SVG images, Martin will generate a sprite -- a JSON index and a PNG image, for both low and high resolution displays. The SVG filenames without extension will be used as the sprite image IDs. The images are searched recursively in the given directory, so subdirectory names will be used as prefixes for the image IDs, e.g. `icons/bicycle.svg` will be available as `icons/bicycle` sprite image. The sprite generation is not yet cached, and may require external reverse proxy or CDN for faster operation. ### API Martin uses [MapLibre sprites API](https://maplibre.org/maplibre-style-spec/sprite/) specification to serve sprites via several endpoints. The sprite image and index are generated on the fly, so if the sprite directory is updated, the changes will be reflected immediately. @@ -39,7 +39,7 @@ martin --sprite /path/to/sprite_a --sprite /path/to/other/sprite_b ### Configuring with Config File -A sprite directory can be configured from the config file with the `sprite` key, similar to how [MBTiles and PMTiles](config-file.md) are configured. 
+A sprite directory can be configured from the config file with the `sprite` key, similar to how [MBTiles and PMTiles](30-config-file.md) are configured. ```yaml # Sprite configuration diff --git a/docs/src/37-sources-fonts.md b/docs/src/37-sources-fonts.md new file mode 100644 index 000000000..65f055a9c --- /dev/null +++ b/docs/src/37-sources-fonts.md @@ -0,0 +1,74 @@ +## Font Sources + +Martin can serve glyph ranges from `otf`, `ttf`, and `ttc` fonts as needed by MapLibre text rendering. Martin will generate them dynamically on the fly. +The glyph range generation is not yet cached, and may require external reverse proxy or CDN for faster operation. + +## API +Fonts ranges are available either for a single font, or a combination of multiple fonts. The font names are case-sensitive and should match the font name in the font file as published in the catalog. Make sure to URL-escape font names as they usually contain spaces. + +| | Font Request | +|---------|--------------------------------------| +| Pattern | `/font/{name}/{start}-{end}` | +| Example | `/font/Overpass%20Mono%20Bold/0-255` | + + +### Composite Font Request + +When combining multiple fonts, the glyph range will contain glyphs from the first listed font if available, and fallback to the next font if the glyph is not available in the first font, etc. The glyph range will be empty if none of the fonts contain the glyph. + +| | Composite Font Request with fallbacks | +|---------|--------------------------------------------------------------| +| Pattern | `/font/{name1},…,{nameN}/{start}-{end}` | +| Example | `/font/Overpass%20Mono%20Bold,Overpass%20Mono%20Light/0-255` | + +### Catalog +Martin will show all available fonts at the `/catalog` endpoint. 
+ +```shell +curl http://127.0.0.1:3000/catalog +{ + "fonts": { + "Overpass Mono Bold": { + "family": "Overpass Mono", + "style": "Bold", + "glyphs": 931, + "start": 0, + "end": 64258 + }, + "Overpass Mono Light": { + "family": "Overpass Mono", + "style": "Light", + "glyphs": 931, + "start": 0, + "end": 64258 + }, + "Overpass Mono SemiBold": { + "family": "Overpass Mono", + "style": "SemiBold", + "glyphs": 931, + "start": 0, + "end": 64258 + } + } +} +``` + +## Using from CLI + +A font file or directory can be configured from the [CLI](21-run-with-cli.md) with one or more `--font` parameters. + +```shell +martin --font /path/to/font/file.ttf --font /path/to/font_dir +``` + +## Configuring from Config File + +A font directory can be configured from the config file with the `fonts` key. + +```yaml +# Fonts configuration +fonts: + # A list of *.otf, *.ttf, and *.ttc font files and dirs to search recursively. + - /path/to/font/file.ttf + - /path/to/font_dir +``` diff --git a/docs/src/40-using-endpoints.md b/docs/src/40-using-endpoints.md new file mode 100644 index 000000000..10df67945 --- /dev/null +++ b/docs/src/40-using-endpoints.md @@ -0,0 +1,79 @@ +## Martin Endpoints + +Martin data is available via the HTTP `GET` endpoints: + +| URL | Description | +|-----------------------------------------|----------------------------------------------| +| `/` | Status text that will eventually show web UI | +| `/catalog` | [List of all sources](#catalog) | +| `/{sourceID}` | [Source TileJSON](#source-tilejson) | +| `/{sourceID}/{z}/{x}/{y}` | Map Tiles | +| `/{source1},…,{sourceN}` | [Composite Source TileJSON](#source-tilejson) | +| `/{source1},…,{sourceN}/{z}/{x}/{y}` | [Composite Source Tiles](30-config-file.md) | +| `/sprite/{spriteID}[@2x].{json,png}` | [Sprite sources](36-sources-sprites.md) | +| `/font/{font}/{start}-{end}` | [Font source](37-sources-fonts.md) | +| `/font/{font1},…,{fontN}/{start}-{end}` | [Composite Font source](37-sources-fonts.md) | +| `/health` | 
Martin server health check: returns 200 `OK` | + +### Duplicate Source ID +In case there is more than one source that has the same name, e.g. a PG function is available in two schemas/connections, or a table has more than one geometry column, sources will be assigned unique IDs such as `/points`, `/points.1`, etc. + +### Reserved Source IDs +Some source IDs are reserved for internal use. If you try to use them, they will be automatically renamed to a unique ID the same way as duplicate source IDs are handled, e.g. a `catalog` source will become `catalog.1`. + +Some of the reserved IDs: `_`, `catalog`, `config`, `font`, `health`, `help`, `index`, `manifest`, `metrics`, `refresh`, +`reload`, `sprite`, `status`. + +### Catalog + +A list of all available sources is available via the catalog endpoint: + +```shell +curl localhost:3000/catalog | jq +``` + +```yaml +{ + "tiles": { + "function_zxy_query": { + "name": "public.function_zxy_query", + "content_type": "application/x-protobuf" + }, + "points1": { + "name": "public.points1.geom", + "content_type": "image/webp" + }, + ... + }, + "sprites": { + "cool_icons": { + "images": [ + "bicycle", + "bear", + ] + }, + ... + }, + "fonts": { + "Noto Mono Regular": { + "family": "Noto Mono", + "style": "Regular", + "glyphs": 875, + "start": 0, + "end": 65533 + }, + ... + } +} +``` + +### Source TileJSON + +All tile sources have a [TileJSON](https://github.com/mapbox/tilejson-spec) endpoint available at the `/{SourceID}`. + +For example, a `points` function or a table will be available as `/points`. Composite source combining `points` and `lines` sources will be available at `/points,lines` endpoint.
+ +```shell +curl localhost:3000/points | jq +curl localhost:3000/points,lines | jq +``` diff --git a/docs/src/using-with-maplibre.md b/docs/src/41-using-with-maplibre.md similarity index 85% rename from docs/src/using-with-maplibre.md rename to docs/src/41-using-with-maplibre.md index 3b63e81d1..edfd22195 100644 --- a/docs/src/using-with-maplibre.md +++ b/docs/src/41-using-with-maplibre.md @@ -1,8 +1,8 @@ -# Using with MapLibre +## Using with MapLibre [MapLibre](https://maplibre.org/projects/maplibre-gl-js/) is an Open-source JavaScript library for showing maps on a website. MapLibre can accept [MVT vector tiles](https://github.com/mapbox/vector-tile-spec) generated by Martin, and applies [a style](https://maplibre.org/maplibre-gl-js-docs/style-spec/) to them to draw a map using Web GL. -You can add a layer to the map and specify Martin [TileJSON](https://github.com/mapbox/tilejson-spec) endpoint as a vector source URL. You should also specify a `source-layer` property. For [Table Sources](sources-pg-tables.md) it is `{table_name}` by default. +You can add a layer to the map and specify Martin [TileJSON](https://github.com/mapbox/tilejson-spec) endpoint as a vector source URL. You should also specify a `source-layer` property. For [Table Sources](32-sources-pg-tables.md) it is `{table_name}` by default. ```js @@ -36,7 +36,7 @@ map.addLayer({ }); ``` -You can also combine multiple sources into one source with [Composite Sources](sources-composite.md). Each source in a composite source can be accessed with its `{source_name}` as a `source-layer` property. +You can also combine multiple sources into one source with [Composite Sources](35-sources-composite.md). Each source in a composite source can be accessed with its `{source_name}` as a `source-layer` property. 
```js map.addSource('points', { diff --git a/docs/src/using-with-leaflet.md b/docs/src/42-using-with-leaflet.md similarity index 96% rename from docs/src/using-with-leaflet.md rename to docs/src/42-using-with-leaflet.md index 842a18039..a0eaeba21 100644 --- a/docs/src/using-with-leaflet.md +++ b/docs/src/42-using-with-leaflet.md @@ -1,4 +1,4 @@ -# Using with Leaflet +## Using with Leaflet [Leaflet](https://github.com/Leaflet/Leaflet) is the leading open-source JavaScript library for mobile-friendly interactive maps. diff --git a/docs/src/using-with-deck-gl.md b/docs/src/43-using-with-deck-gl.md similarity index 98% rename from docs/src/using-with-deck-gl.md rename to docs/src/43-using-with-deck-gl.md index 723cd9108..b459564f0 100644 --- a/docs/src/using-with-deck-gl.md +++ b/docs/src/43-using-with-deck-gl.md @@ -1,4 +1,4 @@ -# Using with deck.gl +## Using with deck.gl [deck.gl](https://deck.gl/) is a WebGL-powered framework for visual exploratory data analysis of large datasets. diff --git a/docs/src/using-with-mapbox.md b/docs/src/44-using-with-mapbox.md similarity index 61% rename from docs/src/using-with-mapbox.md rename to docs/src/44-using-with-mapbox.md index 9203d885a..e0d1ac0ca 100644 --- a/docs/src/using-with-mapbox.md +++ b/docs/src/44-using-with-mapbox.md @@ -1,8 +1,8 @@ -# Using with Mapbox +## Using with Mapbox -[Mapbox GL JS](https://github.com/mapbox/mapbox-gl-js) is a JavaScript library for interactive, customizable vector maps on the web. Mapbox GL JS v1.x was open source, and it was forked as MapLibre, so using Martin with Mapbox is similar to MapLibre described [here](using-with-maplibre.md). Mapbox GL JS can accept [MVT vector tiles](https://github.com/mapbox/vector-tile-spec) generated by Martin, and applies [a style](https://docs.mapbox.com/mapbox-gl-js/style-spec/) to them to draw a map using Web GL. +[Mapbox GL JS](https://github.com/mapbox/mapbox-gl-js) is a JavaScript library for interactive, customizable vector maps on the web. 
Mapbox GL JS v1.x was open source, and it was forked as MapLibre, so using Martin with Mapbox is similar to MapLibre described [here](41-using-with-maplibre.md). Mapbox GL JS can accept [MVT vector tiles](https://github.com/mapbox/vector-tile-spec) generated by Martin, and applies [a style](https://docs.mapbox.com/mapbox-gl-js/style-spec/) to them to draw a map using Web GL. -You can add a layer to the map and specify Martin TileJSON endpoint as a vector source URL. You should also specify a `source-layer` property. For [Table Sources](sources-pg-tables.md) it is `{table_name}` by default. +You can add a layer to the map and specify Martin TileJSON endpoint as a vector source URL. You should also specify a `source-layer` property. For [Table Sources](32-sources-pg-tables.md) it is `{table_name}` by default. ```js map.addLayer({ diff --git a/docs/src/recipes.md b/docs/src/45-recipes.md similarity index 93% rename from docs/src/recipes.md rename to docs/src/45-recipes.md index e2a3e6de2..fb2dfe6d9 100644 --- a/docs/src/recipes.md +++ b/docs/src/45-recipes.md @@ -1,4 +1,6 @@ -## Using with DigitalOcean PostgreSQL +## Recipes + +### Using with DigitalOcean PostgreSQL You can use Martin with [Managed PostgreSQL from DigitalOcean](https://www.digitalocean.com/products/managed-databases-postgresql/) with PostGIS extension @@ -9,7 +11,7 @@ martin --ca-root-file ./ca-certificate.crt \ postgresql://user:password@host:port/db?sslmode=require ``` -## Using with Heroku PostgreSQL +### Using with Heroku PostgreSQL You can use Martin with [Managed PostgreSQL from Heroku](https://www.heroku.com/postgres) with PostGIS extension diff --git a/docs/src/50-tools.md b/docs/src/50-tools.md new file mode 100644 index 000000000..f581affec --- /dev/null +++ b/docs/src/50-tools.md @@ -0,0 +1,12 @@ +# CLI Tools + +Martin project contains additional tooling to help manage the data servable with Martin tile server. 
+ +## `mbtiles` +`mbtiles` is a small utility to interact with the `*.mbtiles` files from the command line. It allows users to examine, copy, validate, compare, and apply diffs between them. + +Use `mbtiles --help` to see a list of available commands, and `mbtiles <command> --help` to see help for a specific command. + +This tool can be installed by compiling the latest released version with `cargo install mbtiles`, or by downloading a pre-built binary from the [releases page](https://github.com/maplibre/martin/releases/latest). + +The `mbtiles` utility builds on top of the [MBTiles specification](https://github.com/mapbox/mbtiles-spec). It adds a few additional conventions to ensure that the content of the tile data is valid, and can be used for reliable diffing and patching of the tilesets. diff --git a/docs/src/51-mbtiles-meta.md b/docs/src/51-mbtiles-meta.md new file mode 100644 index 000000000..65930cb4c --- /dev/null +++ b/docs/src/51-mbtiles-meta.md @@ -0,0 +1,22 @@ +# `mbtiles` Metadata Access + +## meta-all +Print all metadata values to stdout, as well as the results of tile detection. The format of the values printed is not stable, and should only be used for visual inspection. + +```shell +mbtiles meta-all my_file.mbtiles +``` + +## meta-get +Retrieve raw metadata value by its name. The value is printed to stdout without any modifications. For example, to get the `description` value from an mbtiles file: + +```shell +mbtiles meta-get my_file.mbtiles description +``` + +## meta-set +Set metadata value by its name, or delete the key if no value is supplied.
For example, to set the `description` value to `A vector tile dataset`: + +```shell +mbtiles meta-set my_file.mbtiles description "A vector tile dataset" +``` diff --git a/docs/src/52-mbtiles-copy.md b/docs/src/52-mbtiles-copy.md new file mode 100644 index 000000000..9057a3ac4 --- /dev/null +++ b/docs/src/52-mbtiles-copy.md @@ -0,0 +1,57 @@ +# Copying, Diffing, and Patching MBTiles + +## `mbtiles copy` +Copy command copies an mbtiles file, optionally filtering its content by zoom levels. + +```shell +mbtiles copy src_file.mbtiles dst_file.mbtiles \ + --min-zoom 0 --max-zoom 10 +``` + +This command can also be used to generate files of different [supported schema](#supported-schema). + +```shell +mbtiles copy normalized.mbtiles dst.mbtiles \ + --dst-mbttype flat-with-hash +``` + +## `mbtiles copy --diff-with-file` +Copy command can also be used to compare two mbtiles files and generate a delta (diff) file. The diff file can be applied to the `src_file.mbtiles` elsewhere, to avoid copying/transmitting the entire modified dataset. The delta file will contain all tiles that are different between the two files (modifications, insertions, and deletions as `NULL` values), for both the tile and metadata tables. + +There is one exception: `agg_tiles_hash` metadata value will be renamed to `agg_tiles_hash_in_diff`, and a new `agg_tiles_hash` will be generated for the diff file itself. This is done to avoid confusion when applying the diff file to the original file, as the `agg_tiles_hash` value will be different after the diff is applied. The `apply-patch` command will automatically rename the `agg_tiles_hash_in_diff` value back to `agg_tiles_hash` when applying the diff. + +```shell +mbtiles copy src_file.mbtiles diff_file.mbtiles \ + --diff-with-file modified_file.mbtiles +``` + +## `mbtiles copy --apply-patch` + +Copy a source file to destination while also applying the diff file generated by `copy --diff-with-file` command above to the destination mbtiles file.
This allows safer application of the diff file, as the source file is not modified. + +```shell +mbtiles copy src_file.mbtiles dst_file.mbtiles \ + --apply-patch diff_file.mbtiles +``` + +## `mbtiles apply-patch` + +Apply the diff file generated from `copy` command above to an mbtiles file. The diff file can be applied to the `src_file.mbtiles` elsewhere, to avoid copying/transmitting the entire modified dataset. + +Note that the `agg_tiles_hash_in_diff` metadata value will be renamed to `agg_tiles_hash` when applying the diff. This is done to avoid confusion when applying the diff file to the original file, as the `agg_tiles_hash` value will be different after the diff is applied. + +```shell +mbtiles apply-patch src_file.mbtiles diff_file.mbtiles +``` + +#### Applying diff with SQLite +Another way to apply the diff is to use the `sqlite3` command line tool directly. This SQL will delete all tiles from `src_file.mbtiles` that are set to `NULL` in `diff_file.mbtiles`, and then insert or update all new tiles from `diff_file.mbtiles` into `src_file.mbtiles`, where both files are of `flat` type. The name of the diff file is passed as a query parameter to the sqlite3 command line tool, and then used in the SQL statements. Note that this does not update the `agg_tiles_hash` metadata value, so it will be incorrect after the diff is applied.
 + +```shell +sqlite3 src_file.mbtiles \ + -bail \ + -cmd ".parameter set @diffDbFilename diff_file.mbtiles" \ + "ATTACH DATABASE @diffDbFilename AS diffDb;" \ + "DELETE FROM tiles WHERE (zoom_level, tile_column, tile_row) IN (SELECT zoom_level, tile_column, tile_row FROM diffDb.tiles WHERE tile_data ISNULL);" \ + "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) SELECT * FROM diffDb.tiles WHERE tile_data NOTNULL;" +``` diff --git a/docs/src/53-mbtiles-validation.md b/docs/src/53-mbtiles-validation.md new file mode 100644 index 000000000..e2a30851c --- /dev/null +++ b/docs/src/53-mbtiles-validation.md @@ -0,0 +1,38 @@ +# MBTiles Validation + +The original [MBTiles specification](https://github.com/mapbox/mbtiles-spec#readme) does not provide any guarantees for the content of the tile data in MBTiles. `mbtiles validate` assumes a few additional conventions and uses them to ensure that the content of the tile data is valid by performing several validation steps. If the file is not valid, the command will print an error message and exit with a non-zero exit code. + +```shell +mbtiles validate src_file.mbtiles +``` + +## SQLite Integrity check
The `validate` command will run `PRAGMA integrity_check` on the file, and will fail if the result is not `ok`. The `--integrity-check` flag can be used to disable this check, or to make it more thorough with `full` value. Default is `quick`. + +## Schema check
The `validate` command will verify that the `tiles` table/view exists, and that it has the expected columns and indexes. It will also verify that the `metadata` table/view exists, and that it has the expected columns and indexes. + +## Per-tile validation
If the `.mbtiles` file uses [flat-with-hash](54-mbtiles-schema.md#flat-with-hash) or [normalized](54-mbtiles-schema.md#normalized) schema, the `validate` command will verify that the MD5 hash of the `tile_data` column matches the `tile_hash` or `tile_id` columns (depending on the schema). 
 + +A typical normalized schema generated by tools like [tilelive-copy](https://github.com/mapbox/TileLive#bintilelive-copy) uses an MD5 hash in the `tile_id` column. Martin's `mbtiles` tool can use this hash to verify the content of each tile. We also define a new [flat-with-hash](54-mbtiles-schema.md#flat-with-hash) schema that stores the hash and tile data in the same table, allowing per-tile validation without the multiple table layout. + +Per-tile validation is not available for the `flat` schema, and will be skipped. + +## Aggregate Content Validation + +Per-tile validation will catch individual tile corruption, but it will not detect overall datastore corruption such as missing tiles, tiles that should not exist, or tiles with incorrect z/x/y values. For that, the `mbtiles` tool defines a new metadata value called `agg_tiles_hash`. + +The value is computed by hashing the combined value for all rows in the `tiles` table/view, ordered by z,x,y. The value is computed using the following SQL expression, which uses a custom `md5_concat_hex` function from [sqlite-hashes crate](https://crates.io/crates/sqlite-hashes): + +```sql, ignore +md5_concat_hex( + CAST(zoom_level AS TEXT), + CAST(tile_column AS TEXT), + CAST(tile_row AS TEXT), + tile_data) +``` + +In case there are no rows or all are NULL, the hash value of an empty string is used. Note that SQLite allows any value type to be stored in any column, so if `tile_data` accidentally contains a non-blob/text/null value, validation will fail. + +The `mbtiles` tool will compute `agg_tiles_hash` value when copying or validating mbtiles files. Use `--update-agg-tiles-hash` to force the value to be updated, even if it is incorrect or does not exist. 
diff --git a/docs/src/54-mbtiles-schema.md b/docs/src/54-mbtiles-schema.md new file mode 100644 index 000000000..188196b48 --- /dev/null +++ b/docs/src/54-mbtiles-schema.md @@ -0,0 +1,80 @@ +# MBTiles Schemas +The `mbtiles` tool builds on top of the original [MBTiles specification](https://github.com/mapbox/mbtiles-spec#readme) by specifying three different kinds of schema for `tiles` data: `flat`, `flat-with-hash`, and `normalized`. The `mbtiles` tool can convert between these schemas, and can also generate a diff between two files of any schemas, as well as merge multiple schema files into one file. + +## flat +Flat schema is the closest to the original MBTiles specification. It stores all tiles in a single table. This schema is the most efficient when the tileset contains no duplicate tiles. + +```sql, ignore +CREATE TABLE tiles ( + zoom_level INTEGER, + tile_column INTEGER, + tile_row INTEGER, + tile_data BLOB); + +CREATE UNIQUE INDEX tile_index on tiles ( + zoom_level, tile_column, tile_row); +``` + +## flat-with-hash +Similar to the `flat` schema, but also includes a `tile_hash` column that contains a hash value of the `tile_data` column. Use this schema when the tileset has no duplicate tiles, but you still want to be able to validate the content of each tile individually. + +```sql, ignore +CREATE TABLE tiles_with_hash ( + zoom_level INTEGER NOT NULL, + tile_column INTEGER NOT NULL, + tile_row INTEGER NOT NULL, + tile_data BLOB, + tile_hash TEXT); + +CREATE UNIQUE INDEX tiles_with_hash_index on tiles_with_hash ( + zoom_level, tile_column, tile_row); + +CREATE VIEW tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data + FROM tiles_with_hash; +``` + +## normalized +Normalized schema is the most efficient when the tileset contains duplicate tiles. It stores all tile blobs in the `images` table, and stores the tile Z,X,Y coordinates in a `map` table. The `map` table contains a `tile_id` column that is a foreign key to the `images` table. 
The `tile_id` column is a hash of the `tile_data` column, making it possible to both validate each individual tile like in the `flat-with-hash` schema, and also to optimize storage by storing each unique tile only once. + +```sql, ignore +CREATE TABLE map ( + zoom_level INTEGER, + tile_column INTEGER, + tile_row INTEGER, + tile_id TEXT); + +CREATE TABLE images ( + tile_id TEXT, + tile_data BLOB); + +CREATE UNIQUE INDEX map_index ON map ( + zoom_level, tile_column, tile_row); +CREATE UNIQUE INDEX images_id ON images ( + tile_id); + +CREATE VIEW tiles AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM + map JOIN images + ON images.tile_id = map.tile_id; +``` + +Optionally, `.mbtiles` files with `normalized` schema can include a `tiles_with_hash` view. All `normalized` files created by the `mbtiles` tool will contain this view. + +```sql, ignore +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM + map JOIN images + ON map.tile_id = images.tile_id; +``` diff --git a/docs/src/development.md b/docs/src/60-development.md similarity index 94% rename from docs/src/development.md rename to docs/src/60-development.md index adf969d09..1475e6c76 100644 --- a/docs/src/development.md +++ b/docs/src/60-development.md @@ -20,11 +20,11 @@ Install [docker](https://docs.docker.com/get-docker/) and [docker-compose](https sudo apt install -y docker.io docker-compose ``` -Install a few libs and tools like [openssl](https://www.openssl.org/): +Install a few required libs and tools: ```shell, ignore # For Ubuntu-based distros -sudo apt install -y libssl-dev build-essential pkg-config jq file +sudo apt install -y build-essential pkg-config jq file ``` Install [Just](https://github.com/casey/just#readme) (improved makefile processor). 
Note that some Linux and Homebrew distros have outdated versions of Just, so you should install it from source: @@ -54,7 +54,7 @@ Available recipes: test # Run all tests using a test database test-ssl # Run all tests using an SSL connection to a test database. Expected output won't match. test-legacy # Run all tests using the oldest supported version of the database - test-unit *ARGS # Run Rust unit and doc tests (cargo test) + test-cargo *ARGS # Run Rust unit and doc tests (cargo test) test-int # Run integration tests bless # Run integration tests and save its output as the new expected output book # Build and open mdbook documentation diff --git a/docs/src/SUMMARY.md b/docs/src/SUMMARY.md index c306389b3..fbfd32f21 100644 --- a/docs/src/SUMMARY.md +++ b/docs/src/SUMMARY.md @@ -1,24 +1,29 @@ -[Introduction](introduction.md) -- [Installation](installation.md) -- [Running](run.md) - - [Command Line Interface](run-with-cli.md) - - [Environment Variables](env-vars.md) - - [Running with Docker](run-with-docker.md) - - [Running with Docker Compose](run-with-docker-compose.md) - - [Running with NGINX](run-with-nginx.md) - - [Troubleshooting](troubleshooting.md) -- [Configuration File](config-file.md) - - [PostgreSQL Connections](pg-connections.md) - - [PostgreSQL Table Sources](sources-pg-tables.md) - - [PostgreSQL Function Sources](sources-pg-functions.md) - - [MBTiles and PMTiles File Sources](sources-files.md) - - [Composite Sources](sources-composite.md) - - [Sprite Sources](sources-sprites.md) -- [Usage and Endpoint API](using.md) - - [Using with MapLibre](using-with-maplibre.md) - - [Using with Leaflet](using-with-leaflet.md) - - [Using with deck.gl](using-with-deck-gl.md) - - [Using with Mapbox](using-with-mapbox.md) - - [Recipes](recipes.md) -- [Tools](tools.md) -- [Development](development.md) +[Introduction](00-introduction.md) +- [Installation](10-installation.md) +- [Running](20-run.md) + - [Command Line Interface](21-run-with-cli.md) + - [Environment 
Variables](21-env-vars.md) + - [Running with Docker](22-run-with-docker.md) + - [Running with Docker Compose](23-run-with-docker-compose.md) + - [Running with NGINX](24-run-with-nginx.md) + - [Troubleshooting](25-troubleshooting.md) +- [Configuration File](30-config-file.md) + - [PostgreSQL Connections](31-pg-connections.md) + - [PostgreSQL Table Sources](32-sources-pg-tables.md) + - [PostgreSQL Function Sources](33-sources-pg-functions.md) + - [MBTiles and PMTiles File Sources](34-sources-files.md) + - [Composite Sources](35-sources-composite.md) + - [Sprite Sources](36-sources-sprites.md) + - [Font Sources](37-sources-fonts.md) +- [Usage and Endpoint API](40-using-endpoints.md) + - [Using with MapLibre](41-using-with-maplibre.md) + - [Using with Leaflet](42-using-with-leaflet.md) + - [Using with deck.gl](43-using-with-deck-gl.md) + - [Using with Mapbox](44-using-with-mapbox.md) + - [Recipes](45-recipes.md) +- [Tools](50-tools.md) + - [MBTiles Metadata](51-mbtiles-meta.md) + - [MBTiles Copying / Diffing](52-mbtiles-copy.md) + - [MBTiles Validation](53-mbtiles-validation.md) + - [MBTiles Schemas](54-mbtiles-schema.md) +- [Development](60-development.md) diff --git a/docs/src/tools.md b/docs/src/tools.md deleted file mode 100644 index 05f444035..000000000 --- a/docs/src/tools.md +++ /dev/null @@ -1,115 +0,0 @@ -# Tools - -Martin has a few additional tools that can be used to interact with the data. - -## MBTiles tool -A small utility that allows users to interact with the `*.mbtiles` files from the command line. Use `mbtiles --help` to see a list of available commands, and `mbtiles --help` to see help for a specific command. - -This tool can be installed by compiling the latest released version with `cargo install martin-mbtiles`, or by downloading a pre-built binary from the [releases page](https://github.com/maplibre/martin/releases/latest). - -### meta-all -Print all metadata values to stdout, as well as the results of tile detection. 
The format of the values printed is not stable, and should only be used for visual inspection. - -```shell -mbtiles meta-all my_file.mbtiles -``` - -### meta-get -Retrieve raw metadata value by its name. The value is printed to stdout without any modifications. For example, to get the `description` value from an mbtiles file: - -```shell -mbtiles meta-get my_file.mbtiles description -``` - -### meta-set -Set metadata value by its name, or delete the key if no value is supplied. For example, to set the `description` value to `A vector tile dataset`: - -```shell -mbtiles meta-set my_file.mbtiles description "A vector tile dataset" -``` - -### copy -Copy an mbtiles file, optionally filtering its content by zoom levels. - -```shell -mbtiles copy src_file.mbtiles dst_file.mbtiles \ - --min-zoom 0 --max-zoom 10 -``` - -Copy command can also be used to compare two mbtiles files and generate a diff. -```shell -mbtiles copy src_file.mbtiles diff_file.mbtiles \ - --diff-with-file modified_file.mbtiles -``` - -This command can also be used to generate files of different [supported schema](##supported-schema). -```shell -mbtiles copy normalized.mbtiles dst.mbtiles \ - --dst-mbttype flat-with-hash -``` -### apply-diff -Apply the diff file generated from `copy` command above to an mbtiles file. The diff file can be applied to the `src_file.mbtiles` elsewhere, to avoid copying/transmitting the entire modified dataset. -```shell -mbtiles apply_diff src_file.mbtiles diff_file.mbtiles -``` - -Another way to apply the diff is to use the `sqlite3` command line tool directly. This SQL will delete all tiles from `src_file.mbtiles` that are set to `NULL` in `diff_file.mbtiles`, and then insert or update all new tiles from `diff_file.mbtiles` into `src_file.mbtiles`, where both files are of `flat` type. The name of the diff file is passed as a query parameter to the sqlite3 command line tool, and then used in the SQL statements. 
-```shell -sqlite3 src_file.mbtiles \ - -bail \ - -cmd ".parameter set @diffDbFilename diff_file.mbtiles" \ - "ATTACH DATABASE @diffDbFilename AS diffDb;" \ - "DELETE FROM tiles WHERE (zoom_level, tile_column, tile_row) IN (SELECT zoom_level, tile_column, tile_row FROM diffDb.tiles WHERE tile_data ISNULL);" \ - "INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) SELECT * FROM diffDb.tiles WHERE tile_data NOTNULL;" -``` - -### validate -If the `.mbtiles` file is of `flat_with_hash` or `normalized` type, then verify that the data stored in columns `tile_hash` and `tile_id` respectively are MD5 hashes of the `tile_data` column. -```shell -mbtiles validate src_file.mbtiles -``` - -## Supported Schema -The `mbtiles` tool supports three different kinds of schema for `tiles` data in `.mbtiles` files: - -- `flat`: - ``` - CREATE TABLE tiles (zoom_level integer, tile_column integer, tile_row integer, tile_data blob); - CREATE UNIQUE INDEX tile_index on tiles (zoom_level, tile_column, tile_row); - ``` -- `flat-with-hash`: - ``` - CREATE TABLE tiles_with_hash (zoom_level integer NOT NULL, tile_column integer NOT NULL, tile_row integer NOT NULL, tile_data blob, tile_hash text); - CREATE UNIQUE INDEX tiles_with_hash_index on tiles_with_hash (zoom_level, tile_column, tile_row); - CREATE VIEW tiles AS SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash; - ``` -- `normalized`: - ``` - CREATE TABLE map (zoom_level INTEGER, tile_column INTEGER, tile_row INTEGER, tile_id TEXT); - CREATE UNIQUE INDEX map_index ON map (zoom_level, tile_column, tile_row); - CREATE TABLE images (tile_data blob, tile_id text); - CREATE UNIQUE INDEX images_id ON images (tile_id); - CREATE VIEW tiles AS - SELECT - map.zoom_level AS zoom_level, - map.tile_column AS tile_column, - map.tile_row AS tile_row, - images.tile_data AS tile_data - FROM map - JOIN images ON images.tile_id = map.tile_id; - ``` - Optionally, `.mbtiles` files with `normalized` schema can 
include a `tiles_with_hash` view: - ``` - CREATE VIEW tiles_with_hash AS - SELECT - map.zoom_level AS zoom_level, - map.tile_column AS tile_column, - map.tile_row AS tile_row, - images.tile_data AS tile_data, - images.tile_id AS tile_hash - FROM map - JOIN images ON images.tile_id = map.tile_id; - ``` - **__Note:__** All `normalized` files created by the `mbtiles` tool will contain this view. - -For more general spec information, see [here](https://github.com/mapbox/mbtiles-spec#readme). diff --git a/docs/src/using.md b/docs/src/using.md deleted file mode 100644 index a42fbd26e..000000000 --- a/docs/src/using.md +++ /dev/null @@ -1,58 +0,0 @@ -# Martin Endpoints - -Martin data is available via the HTTP `GET` endpoints: - -| URL | Description | -|----------------------------------------|------------------------------------------------| -| `/` | Status text, that will eventually show web UI | -| `/catalog` | [List of all sources](#catalog) | -| `/{sourceID}` | [Source TileJSON](#source-tilejson) | -| `/{sourceID}/{z}/{x}/{y}` | Map Tiles | -| `/{source1},...,{sourceN}` | [Composite Source TileJSON](#source-tilejson) | -| `/{source1},...,{sourceN}/{z}/{x}/{y}` | [Composite Source Tiles](sources-composite.md) | -| `/sprite/{spriteID}[@2x].{json,png}` | [Sprite sources](sources-sprites.md) | -| `/health` | Martin server health check: returns 200 `OK` | - -## Duplicate Source ID -In case there is more than one source that has the same name, e.g. a PG function is available in two schemas/connections, or a table has more than one geometry columns, sources will be assigned unique IDs such as `/points`, `/points.1`, etc. - -## Reserved Source IDs -Some source IDs are reserved for internal use. If you try to use them, they will be automatically renamed to a unique ID the same way as duplicate source IDs are handled, e.g. a `catalog` source will become `catalog.1`. 
- -Some of the reserved IDs: `_`, `catalog`, `config`, `font`, `health`, `help`, `index`, `manifest`, `metrics`, `refresh`, -`reload`, `sprite`, `status`. - -## Catalog - -A list of all available sources is available via catalogue endpoint: - -```shell -curl localhost:3000/catalog | jq -``` - -```yaml -{ - "tiles" { - "function_zxy_query": { - "name": "public.function_zxy_query", - "content_type": "application/x-protobuf" - }, - "points1": { - "name": "public.points1.geom", - "content_type": "image/webp" - }, - ... - }, -} -``` - -## Source TileJSON - -All tile sources have a [TileJSON](https://github.com/mapbox/tilejson-spec) endpoint available at the `/{SourceID}`. - -For example, a `points` function or a table will be available as `/points`. Composite source combining `points` and `lines` sources will be available at `/points,lines` endpoint. - -```shell -curl localhost:3000/points | jq -curl localhost:3000/points,lines | jq -``` diff --git a/homebrew-formula/martin.rb b/homebrew-formula/martin.rb deleted file mode 100644 index e89f08b46..000000000 --- a/homebrew-formula/martin.rb +++ /dev/null @@ -1,29 +0,0 @@ -class Martin < Formula - current_version="0.8.7" - - desc "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support" - homepage "https://github.com/maplibre/martin" - url "https://github.com/maplibre/martin/releases/download/v#{current_version}/martin-Darwin-x86_64.tar.gz" - - # This is the sha256 checksum of the martin-Darwin-x86_64.tar.gz file - # I am not certain if arch64 should have a different sha256 somewhere - sha256 "92f660b1bef3a54dc84e4794a5ba02a8817c25f21ce7000783749bbae9e50de1" - version "#{current_version}" - - depends_on "openssl@3" - - def install - bin.install "martin" - end - - def caveats; <<~EOS - Martin requires a database connection string. - It can be passed as a command-line argument or as a DATABASE_URL environment variable. 
- martin postgres://postgres@localhost/db - EOS - end - - test do - `#{bin}/martin --version` - end -end diff --git a/justfile b/justfile index 503a4df79..873c8aa9b 100644 --- a/justfile +++ b/justfile @@ -2,19 +2,25 @@ set shell := ["bash", "-c"] +#export DATABASE_URL="postgres://postgres:postgres@localhost:5411/db" export PGPORT := "5411" export DATABASE_URL := "postgres://postgres:postgres@localhost:" + PGPORT + "/db" export CARGO_TERM_COLOR := "always" -# export RUST_LOG := "debug" -# export RUST_BACKTRACE := "1" +#export RUST_LOG := "debug" +#export RUST_LOG := "sqlx::query=info,trace" +#export RUST_BACKTRACE := "1" @_default: - just --list --unsorted + {{just_executable()}} --list --unsorted # Start Martin server run *ARGS: - cargo run -- {{ ARGS }} + cargo run -p martin -- {{ ARGS }} + +# Run mbtiles command +mbtiles *ARGS: + cargo run -p mbtiles -- {{ ARGS }} # Start release-compiled Martin server and a test database run-release *ARGS: start @@ -23,7 +29,7 @@ run-release *ARGS: start # Start Martin server and open a test page debug-page *ARGS: start open tests/debug.html # run will not exit, so open debug page first - just run {{ ARGS }} + {{just_executable()}} run {{ ARGS }} # Run PSQL utility against the test database psql *ARGS: @@ -43,18 +49,25 @@ clean-test: rm -rf tests/output # Start a test database -start: (docker-up "db") +start: (docker-up "db") docker-is-ready # Start an ssl-enabled test database -start-ssl: (docker-up "db-ssl") +start-ssl: (docker-up "db-ssl") docker-is-ready + +# Start an ssl-enabled test database that requires a client certificate +start-ssl-cert: (docker-up "db-ssl-cert") docker-is-ready # Start a legacy test database -start-legacy: (docker-up "db-legacy") +start-legacy: (docker-up "db-legacy") docker-is-ready # Start a specific test database, e.g. 
db or db-legacy [private] docker-up name: docker-compose up -d {{ name }} + +# Wait for the test database to be ready +[private] +docker-is-ready: docker-compose run -T --rm db-is-ready alias _down := stop @@ -62,8 +75,8 @@ alias _stop-db := stop # Restart the test database restart: - just stop - just start + {{just_executable()}} stop + {{just_executable()}} start # Stop the test database stop: @@ -81,19 +94,40 @@ bench-http: (cargo-install "oha") oha -z 120s http://localhost:3000/function_zxy_query/18/235085/122323 # Run all tests using a test database -test: (docker-up "db") test-unit test-int +test: start (test-cargo "--all-targets") test-doc test-int # Run all tests using an SSL connection to a test database. Expected output won't match. -test-ssl: (docker-up "ssl") test-unit clean-test +test-ssl: start-ssl (test-cargo "--all-targets") test-doc clean-test + tests/test.sh + +# Run all tests using an SSL connection with client cert to a test database. Expected output won't match. +test-ssl-cert: start-ssl-cert + #!/usr/bin/env bash + set -euxo pipefail + # copy client cert to the tests folder from the docker container + KEY_DIR=target/certs + mkdir -p $KEY_DIR + docker cp martin-db-ssl-cert-1:/etc/ssl/certs/ssl-cert-snakeoil.pem $KEY_DIR/ssl-cert-snakeoil.pem + docker cp martin-db-ssl-cert-1:/etc/ssl/private/ssl-cert-snakeoil.key $KEY_DIR/ssl-cert-snakeoil.key + # export DATABASE_URL="$DATABASE_URL?sslmode=verify-full&sslrootcert=$KEY_DIR/ssl-cert-snakeoil.pem&sslcert=$KEY_DIR/ssl-cert-snakeoil.pem&sslkey=$KEY_DIR/ssl-cert-snakeoil.key" + export PGSSLROOTCERT="$KEY_DIR/ssl-cert-snakeoil.pem" + export PGSSLCERT="$KEY_DIR/ssl-cert-snakeoil.pem" + export PGSSLKEY="$KEY_DIR/ssl-cert-snakeoil.key" + {{just_executable()}} test-cargo --all-targets + {{just_executable()}} clean-test + {{just_executable()}} test-doc tests/test.sh # Run all tests using the oldest supported version of the database -test-legacy: (docker-up "db-legacy") test-unit test-int +test-legacy: 
start-legacy (test-cargo "--all-targets") test-doc test-int + +# Run Rust unit tests (cargo test) +test-cargo *ARGS: + cargo test {{ ARGS }} -# Run Rust unit and doc tests (cargo test) -test-unit *ARGS: - cargo test --all-targets {{ ARGS }} - cargo test --doc +# Run Rust doc tests +test-doc *ARGS: + cargo test --doc {{ ARGS }} # Run integration tests test-int: clean-test install-sqlx @@ -109,12 +143,22 @@ test-int: clean-test install-sqlx fi # Run integration tests and save its output as the new expected output -bless: start clean-test - cargo test --features bless-tests +bless: restart clean-test bless-insta-martin bless-insta-mbtiles + rm -rf tests/temp + cargo test -p martin --features bless-tests tests/test.sh rm -rf tests/expected mv tests/output tests/expected +# Run integration tests and save its output as the new expected output +bless-insta-mbtiles *ARGS: (cargo-install "cargo-insta") + #rm -rf mbtiles/tests/snapshots + cargo insta test --accept --unreferenced=auto -p mbtiles {{ ARGS }} + +# Run integration tests and save its output as the new expected output +bless-insta-martin *ARGS: (cargo-install "cargo-insta") + cargo insta test --accept --unreferenced=auto -p martin {{ ARGS }} + # Build and open mdbook documentation book: (cargo-install "mdbook") mdbook serve docs --open --port 8321 @@ -136,8 +180,8 @@ coverage FORMAT='html': (cargo-install "grcov") rustup component add llvm-tools-preview ;\ fi - just clean - just start + {{just_executable()}} clean + {{just_executable()}} start PROF_DIR=target/prof mkdir -p "$PROF_DIR" @@ -204,19 +248,20 @@ fmt2: # Run cargo clippy clippy: cargo clippy --workspace --all-targets --bins --tests --lib --benches -- -D warnings + RUSTDOCFLAGS="-D warnings" cargo doc --no-deps --workspace # These steps automatically run before git push via a git hook [private] git-pre-push: stop start rustc --version cargo --version - just lint - just test + {{just_executable()}} lint + {{just_executable()}} test # Update sqlite database 
schema. prepare-sqlite: install-sqlx - mkdir -p martin-mbtiles/.sqlx - cd martin-mbtiles && cargo sqlx prepare --database-url sqlite://$PWD/../tests/fixtures/files/world_cities.mbtiles -- --lib --tests + mkdir -p mbtiles/.sqlx + cd mbtiles && cargo sqlx prepare --database-url sqlite://$PWD/../tests/fixtures/mbtiles/world_cities.mbtiles -- --lib --tests # Install SQLX cli if not already installed. [private] diff --git a/martin-mbtiles/.sqlx/query-14f262aafedb8739ee403fe6fc67989d706ce91630c9332a600e8022c0d4b628.json b/martin-mbtiles/.sqlx/query-14f262aafedb8739ee403fe6fc67989d706ce91630c9332a600e8022c0d4b628.json deleted file mode 100644 index 0848bf78d..000000000 --- a/martin-mbtiles/.sqlx/query-14f262aafedb8739ee403fe6fc67989d706ce91630c9332a600e8022c0d4b628.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "SELECT (\n -- Has a \"map\" table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'map'\n AND type = 'table'\n --\n ) AND (\n -- \"map\" table's columns and their types are as expected:\n -- 4 columns (zoom_level, tile_column, tile_row, tile_id).\n -- The order is not important\n SELECT COUNT(*) = 4\n FROM pragma_table_info('map')\n WHERE ((name = \"zoom_level\" AND type = \"INTEGER\")\n OR (name = \"tile_column\" AND type = \"INTEGER\")\n OR (name = \"tile_row\" AND type = \"INTEGER\")\n OR (name = \"tile_id\" AND type = \"TEXT\"))\n --\n ) AND (\n -- Has a \"images\" table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'images'\n AND type = 'table'\n --\n ) AND (\n -- \"images\" table's columns and their types are as expected:\n -- 2 columns (tile_id, tile_data).\n -- The order is not important\n SELECT COUNT(*) = 2\n FROM pragma_table_info('images')\n WHERE ((name = \"tile_id\" AND type = \"TEXT\")\n OR (name = \"tile_data\" AND type = \"BLOB\"))\n --\n ) AS is_valid;\n", - "describe": { - "columns": [ - { - "name": "is_valid", - "ordinal": 0, - "type_info": "Int" - } - ], - "parameters": { - "Right": 0 - }, - 
"nullable": [ - null - ] - }, - "hash": "14f262aafedb8739ee403fe6fc67989d706ce91630c9332a600e8022c0d4b628" -} diff --git a/martin-mbtiles/.sqlx/query-177aed5e4ee0e7a23eb708174a829e7f1af10037bdfb6543b029cc80c3ee60dd.json b/martin-mbtiles/.sqlx/query-177aed5e4ee0e7a23eb708174a829e7f1af10037bdfb6543b029cc80c3ee60dd.json deleted file mode 100644 index 6e141d9d8..000000000 --- a/martin-mbtiles/.sqlx/query-177aed5e4ee0e7a23eb708174a829e7f1af10037bdfb6543b029cc80c3ee60dd.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "SELECT (\n -- Has a \"tiles\" table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'tiles'\n AND type = 'table'\n --\n ) AND (\n -- \"tiles\" table's columns and their types are as expected:\n -- 4 columns (zoom_level, tile_column, tile_row, tile_data).\n -- The order is not important\n SELECT COUNT(*) = 4\n FROM pragma_table_info('tiles')\n WHERE ((name = \"zoom_level\" AND type = \"INTEGER\")\n OR (name = \"tile_column\" AND type = \"INTEGER\")\n OR (name = \"tile_row\" AND type = \"INTEGER\")\n OR (name = \"tile_data\" AND type = \"BLOB\"))\n --\n ) as is_valid;\n", - "describe": { - "columns": [ - { - "name": "is_valid", - "ordinal": 0, - "type_info": "Int" - } - ], - "parameters": { - "Right": 0 - }, - "nullable": [ - null - ] - }, - "hash": "177aed5e4ee0e7a23eb708174a829e7f1af10037bdfb6543b029cc80c3ee60dd" -} diff --git a/martin-mbtiles/.sqlx/query-3a1e6e16157856190e061e1ade9b59995c337cfe7e4c54d4bbb2669a27682401.json b/martin-mbtiles/.sqlx/query-3a1e6e16157856190e061e1ade9b59995c337cfe7e4c54d4bbb2669a27682401.json deleted file mode 100644 index 6230f16d4..000000000 --- a/martin-mbtiles/.sqlx/query-3a1e6e16157856190e061e1ade9b59995c337cfe7e4c54d4bbb2669a27682401.json +++ /dev/null @@ -1,20 +0,0 @@ -{ - "db_name": "SQLite", - "query": "SELECT (\n -- Has a \"tiles_with_hash\" table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'tiles_with_hash'\n AND type = 'table'\n --\n ) AND (\n -- \"tiles_with_hash\" 
table's columns and their types are as expected:\n -- 5 columns (zoom_level, tile_column, tile_row, tile_data, tile_hash).\n -- The order is not important\n SELECT COUNT(*) = 5\n FROM pragma_table_info('tiles_with_hash')\n WHERE ((name = \"zoom_level\" AND type = \"INTEGER\")\n OR (name = \"tile_column\" AND type = \"INTEGER\")\n OR (name = \"tile_row\" AND type = \"INTEGER\")\n OR (name = \"tile_data\" AND type = \"BLOB\")\n OR (name = \"tile_hash\" AND type = \"TEXT\"))\n --\n ) as is_valid;\n", - "describe": { - "columns": [ - { - "name": "is_valid", - "ordinal": 0, - "type_info": "Int" - } - ], - "parameters": { - "Right": 0 - }, - "nullable": [ - null - ] - }, - "hash": "3a1e6e16157856190e061e1ade9b59995c337cfe7e4c54d4bbb2669a27682401" -} diff --git a/martin-mbtiles/.sqlx/query-3b2930e8d61f31ea1bf32efe340b7766f876ddb9a357a512ab3a37914bea003c.json b/martin-mbtiles/.sqlx/query-3b2930e8d61f31ea1bf32efe340b7766f876ddb9a357a512ab3a37914bea003c.json deleted file mode 100644 index 1a8fe4fd0..000000000 --- a/martin-mbtiles/.sqlx/query-3b2930e8d61f31ea1bf32efe340b7766f876ddb9a357a512ab3a37914bea003c.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "ATTACH DATABASE ? AS sourceDb", - "describe": { - "columns": [], - "parameters": { - "Right": 1 - }, - "nullable": [] - }, - "hash": "3b2930e8d61f31ea1bf32efe340b7766f876ddb9a357a512ab3a37914bea003c" -} diff --git a/martin-mbtiles/.sqlx/query-b3aaef71d6a26404c3bebcc6ee8ad480aaa224721cd9ddb4ac5859f71a57727e.json b/martin-mbtiles/.sqlx/query-b3aaef71d6a26404c3bebcc6ee8ad480aaa224721cd9ddb4ac5859f71a57727e.json deleted file mode 100644 index 71fbbc367..000000000 --- a/martin-mbtiles/.sqlx/query-b3aaef71d6a26404c3bebcc6ee8ad480aaa224721cd9ddb4ac5859f71a57727e.json +++ /dev/null @@ -1,12 +0,0 @@ -{ - "db_name": "SQLite", - "query": "ATTACH DATABASE ? 
AS otherDb", - "describe": { - "columns": [], - "parameters": { - "Right": 1 - }, - "nullable": [] - }, - "hash": "b3aaef71d6a26404c3bebcc6ee8ad480aaa224721cd9ddb4ac5859f71a57727e" -} diff --git a/martin-mbtiles/README.md b/martin-mbtiles/README.md deleted file mode 100644 index d0a0484fd..000000000 --- a/martin-mbtiles/README.md +++ /dev/null @@ -1,16 +0,0 @@ -# martin-mbtiles - -[![Book](https://img.shields.io/badge/docs-Book-informational)](https://maplibre.org/martin/tools.html) -[![docs.rs docs](https://docs.rs/martin-mbtiles/badge.svg)](https://docs.rs/martin-mbtiles) -[![Slack chat](https://img.shields.io/badge/Chat-on%20Slack-blueviolet)](https://slack.openstreetmap.us/) -[![GitHub](https://img.shields.io/badge/github-maplibre/martin-8da0cb?logo=github)](https://github.com/maplibre/martin) -[![crates.io version](https://img.shields.io/crates/v/martin-mbtiles.svg)](https://crates.io/crates/martin-mbtiles) -[![CI build](https://github.com/maplibre/martin/workflows/CI/badge.svg)](https://github.com/maplibre/martin-mbtiles/actions) - -A library to help tile servers like [Martin](https://maplibre.org/martin) work with [MBTiles](https://github.com/mapbox/mbtiles-spec) files. - -This crate also has a small utility that allows users to interact with the `*.mbtiles` files from the command line. See [tools](https://maplibre.org/martin/tools.html) documentation for more information. - -### Development - -Any changes to SQL commands require running of `just prepare-sqlite`. This will install `cargo sqlx` command if it is not already installed, and update the `./sqlx-data.json` file. 
diff --git a/martin-mbtiles/src/lib.rs b/martin-mbtiles/src/lib.rs deleted file mode 100644 index 9c16049ef..000000000 --- a/martin-mbtiles/src/lib.rs +++ /dev/null @@ -1,14 +0,0 @@ -#![allow(clippy::missing_errors_doc)] - -mod errors; -mod mbtiles; -mod mbtiles_pool; -mod mbtiles_queries; -mod tile_copier; - -pub use errors::MbtError; -pub use mbtiles::{IntegrityCheckType, Mbtiles, Metadata}; -pub use mbtiles_pool::MbtilesPool; -pub use tile_copier::{ - apply_mbtiles_diff, copy_mbtiles_file, CopyDuplicateMode, TileCopierOptions, -}; diff --git a/martin-mbtiles/src/mbtiles_queries.rs b/martin-mbtiles/src/mbtiles_queries.rs deleted file mode 100644 index 0d04815da..000000000 --- a/martin-mbtiles/src/mbtiles_queries.rs +++ /dev/null @@ -1,125 +0,0 @@ -use sqlx::{query, SqliteExecutor}; - -use crate::errors::MbtResult; - -pub async fn is_normalized_tables_type(conn: &mut T) -> MbtResult -where - for<'e> &'e mut T: SqliteExecutor<'e>, -{ - let sql = query!( - r#"SELECT ( - -- Has a "map" table - SELECT COUNT(*) = 1 - FROM sqlite_master - WHERE name = 'map' - AND type = 'table' - -- - ) AND ( - -- "map" table's columns and their types are as expected: - -- 4 columns (zoom_level, tile_column, tile_row, tile_id). - -- The order is not important - SELECT COUNT(*) = 4 - FROM pragma_table_info('map') - WHERE ((name = "zoom_level" AND type = "INTEGER") - OR (name = "tile_column" AND type = "INTEGER") - OR (name = "tile_row" AND type = "INTEGER") - OR (name = "tile_id" AND type = "TEXT")) - -- - ) AND ( - -- Has a "images" table - SELECT COUNT(*) = 1 - FROM sqlite_master - WHERE name = 'images' - AND type = 'table' - -- - ) AND ( - -- "images" table's columns and their types are as expected: - -- 2 columns (tile_id, tile_data). 
- -- The order is not important - SELECT COUNT(*) = 2 - FROM pragma_table_info('images') - WHERE ((name = "tile_id" AND type = "TEXT") - OR (name = "tile_data" AND type = "BLOB")) - -- - ) AS is_valid; -"# - ); - - Ok(sql - .fetch_one(&mut *conn) - .await? - .is_valid - .unwrap_or_default() - == 1) -} - -pub async fn is_flat_with_hash_tables_type(conn: &mut T) -> MbtResult -where - for<'e> &'e mut T: SqliteExecutor<'e>, -{ - let sql = query!( - r#"SELECT ( - -- Has a "tiles_with_hash" table - SELECT COUNT(*) = 1 - FROM sqlite_master - WHERE name = 'tiles_with_hash' - AND type = 'table' - -- - ) AND ( - -- "tiles_with_hash" table's columns and their types are as expected: - -- 5 columns (zoom_level, tile_column, tile_row, tile_data, tile_hash). - -- The order is not important - SELECT COUNT(*) = 5 - FROM pragma_table_info('tiles_with_hash') - WHERE ((name = "zoom_level" AND type = "INTEGER") - OR (name = "tile_column" AND type = "INTEGER") - OR (name = "tile_row" AND type = "INTEGER") - OR (name = "tile_data" AND type = "BLOB") - OR (name = "tile_hash" AND type = "TEXT")) - -- - ) as is_valid; -"# - ); - - Ok(sql - .fetch_one(&mut *conn) - .await? - .is_valid - .unwrap_or_default() - == 1) -} - -pub async fn is_flat_tables_type(conn: &mut T) -> MbtResult -where - for<'e> &'e mut T: SqliteExecutor<'e>, -{ - let sql = query!( - r#"SELECT ( - -- Has a "tiles" table - SELECT COUNT(*) = 1 - FROM sqlite_master - WHERE name = 'tiles' - AND type = 'table' - -- - ) AND ( - -- "tiles" table's columns and their types are as expected: - -- 4 columns (zoom_level, tile_column, tile_row, tile_data). - -- The order is not important - SELECT COUNT(*) = 4 - FROM pragma_table_info('tiles') - WHERE ((name = "zoom_level" AND type = "INTEGER") - OR (name = "tile_column" AND type = "INTEGER") - OR (name = "tile_row" AND type = "INTEGER") - OR (name = "tile_data" AND type = "BLOB")) - -- - ) as is_valid; -"# - ); - - Ok(sql - .fetch_one(&mut *conn) - .await? 
- .is_valid - .unwrap_or_default() - == 1) -} diff --git a/martin-mbtiles/src/tile_copier.rs b/martin-mbtiles/src/tile_copier.rs deleted file mode 100644 index e556b79ac..000000000 --- a/martin-mbtiles/src/tile_copier.rs +++ /dev/null @@ -1,902 +0,0 @@ -use std::collections::HashSet; -use std::path::PathBuf; - -#[cfg(feature = "cli")] -use clap::{builder::ValueParser, error::ErrorKind, Args, ValueEnum}; -use sqlite_hashes::rusqlite::params_from_iter; -use sqlx::sqlite::SqliteConnectOptions; -use sqlx::{query, Connection, Row, SqliteConnection}; - -use crate::errors::MbtResult; -use crate::mbtiles::MbtType; -use crate::mbtiles::MbtType::{Flat, FlatWithHash, Normalized}; -use crate::{MbtError, Mbtiles}; - -#[derive(PartialEq, Eq, Default, Debug, Clone)] -#[cfg_attr(feature = "cli", derive(ValueEnum))] -pub enum CopyDuplicateMode { - #[default] - Override, - Ignore, - Abort, -} - -#[derive(Clone, Default, PartialEq, Eq, Debug)] -#[cfg_attr(feature = "cli", derive(Args))] -pub struct TileCopierOptions { - /// MBTiles file to read from - src_file: PathBuf, - /// MBTiles file to write to - dst_file: PathBuf, - /// Output format of the destination file, ignored if the file exists. 
If not specified, defaults to the type of source - #[cfg_attr(feature = "cli", arg(long, value_enum))] - dst_type: Option, - /// Specify copying behaviour when tiles with duplicate (zoom_level, tile_column, tile_row) values are found - #[cfg_attr(feature = "cli", arg(long, value_enum, default_value_t = CopyDuplicateMode::default()))] - on_duplicate: CopyDuplicateMode, - /// Minimum zoom level to copy - #[cfg_attr(feature = "cli", arg(long, conflicts_with("zoom_levels")))] - min_zoom: Option, - /// Maximum zoom level to copy - #[cfg_attr(feature = "cli", arg(long, conflicts_with("zoom_levels")))] - max_zoom: Option, - /// List of zoom levels to copy - #[cfg_attr(feature = "cli", arg(long, value_parser(ValueParser::new(HashSetValueParser{})), default_value=""))] - zoom_levels: HashSet, - /// Compare source file with this file, and only copy non-identical tiles to destination - #[cfg_attr(feature = "cli", arg(long))] - diff_with_file: Option, - /// Skip generating a global hash for mbtiles validation. By default, if dst_type is flat-with-hash or normalized, generate a global hash and store in the metadata table - #[cfg_attr(feature = "cli", arg(long))] - skip_agg_tiles_hash: bool, -} - -#[cfg(feature = "cli")] -#[derive(Clone)] -struct HashSetValueParser; - -#[cfg(feature = "cli")] -impl clap::builder::TypedValueParser for HashSetValueParser { - type Value = HashSet; - - fn parse_ref( - &self, - _cmd: &clap::Command, - _arg: Option<&clap::Arg>, - value: &std::ffi::OsStr, - ) -> Result { - let mut result = HashSet::::new(); - let values = value - .to_str() - .ok_or(clap::Error::new(ErrorKind::ValueValidation))? 
- .trim(); - if !values.is_empty() { - for val in values.split(',') { - result.insert( - val.trim() - .parse::() - .map_err(|_| clap::Error::new(ErrorKind::ValueValidation))?, - ); - } - } - Ok(result) - } -} - -#[derive(Clone, Debug)] -struct TileCopier { - src_mbtiles: Mbtiles, - dst_mbtiles: Mbtiles, - options: TileCopierOptions, -} - -impl TileCopierOptions { - #[must_use] - pub fn new(src_filepath: PathBuf, dst_filepath: PathBuf) -> Self { - Self { - src_file: src_filepath, - dst_file: dst_filepath, - zoom_levels: HashSet::new(), - dst_type: None, - on_duplicate: CopyDuplicateMode::Override, - min_zoom: None, - max_zoom: None, - diff_with_file: None, - skip_agg_tiles_hash: false, - } - } - - #[must_use] - pub fn dst_type(mut self, dst_type: Option) -> Self { - self.dst_type = dst_type; - self - } - - #[must_use] - pub fn on_duplicate(mut self, on_duplicate: CopyDuplicateMode) -> Self { - self.on_duplicate = on_duplicate; - self - } - - #[must_use] - pub fn zoom_levels(mut self, zoom_levels: Vec) -> Self { - self.zoom_levels.extend(zoom_levels); - self - } - - #[must_use] - pub fn min_zoom(mut self, min_zoom: Option) -> Self { - self.min_zoom = min_zoom; - self - } - - #[must_use] - pub fn max_zoom(mut self, max_zoom: Option) -> Self { - self.max_zoom = max_zoom; - self - } - - #[must_use] - pub fn diff_with_file(mut self, diff_with_file: PathBuf) -> Self { - self.diff_with_file = Some(diff_with_file); - self - } - - #[must_use] - pub fn skip_agg_tiles_hash(mut self, skip_global_hash: bool) -> Self { - self.skip_agg_tiles_hash = skip_global_hash; - self - } -} - -impl TileCopier { - pub fn new(options: TileCopierOptions) -> MbtResult { - Ok(TileCopier { - src_mbtiles: Mbtiles::new(&options.src_file)?, - dst_mbtiles: Mbtiles::new(&options.dst_file)?, - options, - }) - } - - pub async fn run(self) -> MbtResult { - let src_type = open_and_detect_type(&self.src_mbtiles).await?; - - let mut conn = SqliteConnection::connect_with( - &SqliteConnectOptions::new() - 
.create_if_missing(true) - .filename(&self.options.dst_file), - ) - .await?; - - let is_empty = query!("SELECT 1 as has_rows FROM sqlite_schema LIMIT 1") - .fetch_optional(&mut conn) - .await? - .is_none(); - - let dst_type = if is_empty { - let dst_type = self.options.dst_type.unwrap_or(src_type); - self.create_new_mbtiles(&mut conn, dst_type, src_type) - .await?; - dst_type - } else if self.options.diff_with_file.is_some() { - return Err(MbtError::NonEmptyTargetFile(self.options.dst_file)); - } else { - open_and_detect_type(&self.dst_mbtiles).await? - }; - - let rusqlite_conn = self.dst_mbtiles.open_with_hashes(false)?; - rusqlite_conn.execute( - "ATTACH DATABASE ? AS sourceDb", - [self.src_mbtiles.filepath()], - )?; - - let (on_dupl, sql_cond) = self.get_on_duplicate_sql(dst_type); - - let (select_from, query_args) = { - let select_from = if let Some(diff_file) = &self.options.diff_with_file { - let diff_with_mbtiles = Mbtiles::new(diff_file)?; - let diff_type = open_and_detect_type(&diff_with_mbtiles).await?; - - rusqlite_conn - .execute("ATTACH DATABASE ? 
AS newDb", [diff_with_mbtiles.filepath()])?; - - Self::get_select_from_with_diff(dst_type, diff_type) - } else { - Self::get_select_from(dst_type, src_type).to_string() - }; - - let (options_sql, query_args) = self.get_options_sql(); - - (format!("{select_from} {options_sql}"), query_args) - }; - - match dst_type { - Flat => rusqlite_conn.execute( - &format!("INSERT {on_dupl} INTO tiles {select_from} {sql_cond}"), - params_from_iter(query_args), - )?, - FlatWithHash => rusqlite_conn.execute( - &format!("INSERT {on_dupl} INTO tiles_with_hash {select_from} {sql_cond}"), - params_from_iter(query_args), - )?, - Normalized => { - rusqlite_conn.execute( - &format!( - "INSERT {on_dupl} INTO map (zoom_level, tile_column, tile_row, tile_id) - SELECT zoom_level, tile_column, tile_row, hash as tile_id - FROM ({select_from} {sql_cond})" - ), - params_from_iter(&query_args), - )?; - rusqlite_conn.execute( - &format!( - "INSERT OR IGNORE INTO images SELECT tile_data, hash FROM ({select_from})" - ), - params_from_iter(query_args), - )? - } - }; - - if !self.options.skip_agg_tiles_hash && (dst_type == FlatWithHash || dst_type == Normalized) - { - self.dst_mbtiles.update_agg_tiles_hash(&mut conn).await?; - } - - Ok(conn) - } - - async fn create_new_mbtiles( - &self, - conn: &mut SqliteConnection, - dst_type: MbtType, - src_type: MbtType, - ) -> MbtResult<()> { - let path = self.src_mbtiles.filepath(); - query!("ATTACH DATABASE ? AS sourceDb", path) - .execute(&mut *conn) - .await?; - - query!("PRAGMA page_size = 512").execute(&mut *conn).await?; - query!("VACUUM").execute(&mut *conn).await?; - - if dst_type == src_type { - // DB objects must be created in a specific order: tables, views, triggers, indexes. 
- for row in query( - "SELECT sql - FROM sourceDb.sqlite_schema - WHERE tbl_name IN ('metadata', 'tiles', 'map', 'images', 'tiles_with_hash') - AND type IN ('table', 'view', 'trigger', 'index') - ORDER BY CASE - WHEN type = 'table' THEN 1 - WHEN type = 'view' THEN 2 - WHEN type = 'trigger' THEN 3 - WHEN type = 'index' THEN 4 - ELSE 5 END", - ) - .fetch_all(&mut *conn) - .await? - { - query(row.get(0)).execute(&mut *conn).await?; - } - } else { - match dst_type { - Flat => self.create_flat_tables(&mut *conn).await?, - FlatWithHash => self.create_flat_with_hash_tables(&mut *conn).await?, - Normalized => self.create_normalized_tables(&mut *conn).await?, - }; - }; - - if dst_type == Normalized { - query( - "CREATE VIEW tiles_with_hash AS - SELECT - map.zoom_level AS zoom_level, - map.tile_column AS tile_column, - map.tile_row AS tile_row, - images.tile_data AS tile_data, - images.tile_id AS tile_hash - FROM map - JOIN images ON images.tile_id = map.tile_id", - ) - .execute(&mut *conn) - .await?; - } - - query("INSERT INTO metadata SELECT * FROM sourceDb.metadata") - .execute(&mut *conn) - .await?; - - Ok(()) - } - - async fn create_flat_tables(&self, conn: &mut SqliteConnection) -> MbtResult<()> { - for statement in &[ - "CREATE TABLE metadata (name text NOT NULL PRIMARY KEY, value text);", - "CREATE TABLE tiles ( - zoom_level integer NOT NULL, - tile_column integer NOT NULL, - tile_row integer NOT NULL, - tile_data blob, - PRIMARY KEY(zoom_level, tile_column, tile_row));", - ] { - query(statement).execute(&mut *conn).await?; - } - Ok(()) - } - - async fn create_flat_with_hash_tables(&self, conn: &mut SqliteConnection) -> MbtResult<()> { - for statement in &[ - "CREATE TABLE metadata (name text NOT NULL PRIMARY KEY, value text);", - "CREATE TABLE tiles_with_hash ( - zoom_level integer NOT NULL, - tile_column integer NOT NULL, - tile_row integer NOT NULL, - tile_data blob, - tile_hash text, - PRIMARY KEY(zoom_level, tile_column, tile_row));", - "CREATE VIEW tiles AS - 
SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash;", - ] { - query(statement).execute(&mut *conn).await?; - } - Ok(()) - } - - async fn create_normalized_tables(&self, conn: &mut SqliteConnection) -> MbtResult<()> { - for statement in &[ - "CREATE TABLE metadata (name text NOT NULL PRIMARY KEY, value text);", - "CREATE TABLE map ( - zoom_level integer NOT NULL, - tile_column integer NOT NULL, - tile_row integer NOT NULL, - tile_id text, - PRIMARY KEY(zoom_level, tile_column, tile_row));", - "CREATE TABLE images (tile_data blob, tile_id text NOT NULL PRIMARY KEY);", - "CREATE VIEW tiles AS - SELECT map.zoom_level AS zoom_level, map.tile_column AS tile_column, map.tile_row AS tile_row, images.tile_data AS tile_data - FROM map - JOIN images ON images.tile_id = map.tile_id;"] { - query(statement).execute(&mut *conn).await?; - } - Ok(()) - } - - fn get_on_duplicate_sql(&self, mbttype: MbtType) -> (String, String) { - match &self.options.on_duplicate { - CopyDuplicateMode::Override => ("OR REPLACE".to_string(), String::new()), - CopyDuplicateMode::Ignore => ("OR IGNORE".to_string(), String::new()), - CopyDuplicateMode::Abort => ("OR ABORT".to_string(), { - let (main_table, tile_identifier) = match mbttype { - Flat => ("tiles", "tile_data"), - FlatWithHash => ("tiles_with_hash", "tile_data"), - Normalized => ("map", "tile_id"), - }; - - format!( - "AND NOT EXISTS ( - SELECT 1 - FROM {main_table} - WHERE - {main_table}.zoom_level = sourceDb.{main_table}.zoom_level - AND {main_table}.tile_column = sourceDb.{main_table}.tile_column - AND {main_table}.tile_row = sourceDb.{main_table}.tile_row - AND {main_table}.{tile_identifier} != sourceDb.{main_table}.{tile_identifier} - )" - ) - }), - } - } - - fn get_select_from_with_diff(dst_type: MbtType, diff_type: MbtType) -> String { - let (hash_col_sql, new_tiles_with_hash) = if dst_type == Flat { - ("", "newDb.tiles") - } else { - match diff_type { - Flat => (", hex(md5(tile_data)) as hash", "newDb.tiles"), - 
FlatWithHash => (", new_tiles_with_hash.tile_hash as hash", "newDb.tiles_with_hash"), - Normalized => (", new_tiles_with_hash.hash", - "(SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS hash - FROM newDb.map JOIN newDb.images ON newDb.map.tile_id = newDb.images.tile_id)"), - } - }; - - format!("SELECT COALESCE(sourceDb.tiles.zoom_level, new_tiles_with_hash.zoom_level) as zoom_level, - COALESCE(sourceDb.tiles.tile_column, new_tiles_with_hash.tile_column) as tile_column, - COALESCE(sourceDb.tiles.tile_row, new_tiles_with_hash.tile_row) as tile_row, - new_tiles_with_hash.tile_data as tile_data - {hash_col_sql} - FROM sourceDb.tiles FULL JOIN {new_tiles_with_hash} AS new_tiles_with_hash - ON sourceDb.tiles.zoom_level = new_tiles_with_hash.zoom_level - AND sourceDb.tiles.tile_column = new_tiles_with_hash.tile_column - AND sourceDb.tiles.tile_row = new_tiles_with_hash.tile_row - WHERE (sourceDb.tiles.tile_data != new_tiles_with_hash.tile_data - OR sourceDb.tiles.tile_data ISNULL - OR new_tiles_with_hash.tile_data ISNULL)") - } - - fn get_select_from(dst_type: MbtType, src_type: MbtType) -> &'static str { - if dst_type == Flat { - "SELECT * FROM sourceDb.tiles WHERE TRUE " - } else { - match src_type { - Flat => "SELECT zoom_level, tile_column, tile_row, tile_data, hex(md5(tile_data)) as hash FROM sourceDb.tiles WHERE TRUE ", - FlatWithHash => "SELECT zoom_level, tile_column, tile_row, tile_data, tile_hash AS hash FROM sourceDb.tiles_with_hash WHERE TRUE ", - Normalized => "SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS hash FROM sourceDb.map JOIN sourceDb.images ON sourceDb.map.tile_id = sourceDb.images.tile_id WHERE TRUE " - } - } - } - - fn get_options_sql(&self) -> (String, Vec) { - let mut query_args = vec![]; - - let sql = if !&self.options.zoom_levels.is_empty() { - for z in &self.options.zoom_levels { - query_args.push(*z); - } - format!( - " AND zoom_level IN ({})", - vec!["?"; self.options.zoom_levels.len()].join(",") - ) 
- } else if let Some(min_zoom) = self.options.min_zoom { - if let Some(max_zoom) = self.options.max_zoom { - query_args.push(min_zoom); - query_args.push(max_zoom); - " AND zoom_level BETWEEN ? AND ?".to_string() - } else { - query_args.push(min_zoom); - " AND zoom_level >= ?".to_string() - } - } else if let Some(max_zoom) = self.options.max_zoom { - query_args.push(max_zoom); - " AND zoom_level <= ?".to_string() - } else { - String::new() - }; - - (sql, query_args) - } -} - -async fn open_and_detect_type(mbtiles: &Mbtiles) -> MbtResult { - let opt = SqliteConnectOptions::new() - .read_only(true) - .filename(mbtiles.filepath()); - let mut conn = SqliteConnection::connect_with(&opt).await?; - mbtiles.detect_type(&mut conn).await -} - -pub async fn apply_mbtiles_diff(src_file: PathBuf, diff_file: PathBuf) -> MbtResult<()> { - let src_mbtiles = Mbtiles::new(src_file)?; - let diff_mbtiles = Mbtiles::new(diff_file)?; - - let src_type = open_and_detect_type(&src_mbtiles).await?; - let diff_type = open_and_detect_type(&diff_mbtiles).await?; - - let rusqlite_conn = src_mbtiles.open_with_hashes(false)?; - rusqlite_conn.execute("ATTACH DATABASE ? 
AS diffDb", [diff_mbtiles.filepath()])?; - - let select_from = if src_type == Flat { - "SELECT zoom_level, tile_column, tile_row, tile_data FROM diffDb.tiles" - } else { - match diff_type { - Flat => "SELECT zoom_level, tile_column, tile_row, tile_data, hex(md5(tile_data)) as hash FROM diffDb.tiles", - FlatWithHash => "SELECT zoom_level, tile_column, tile_row, tile_data, tile_hash AS hash FROM diffDb.tiles_with_hash", - Normalized => "SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS hash FROM diffDb.map LEFT JOIN diffDb.images ON diffDb.map.tile_id = diffDb.images.tile_id", - } - }.to_string(); - - let (main_table, insert_sql) = match src_type { - Flat => ("tiles", vec![ - format!("INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) {select_from}")]), - FlatWithHash => ("tiles_with_hash", vec![ - format!("INSERT OR REPLACE INTO tiles_with_hash {select_from}")]), - Normalized => ("map", vec![ - format!("INSERT OR REPLACE INTO map (zoom_level, tile_column, tile_row, tile_id) - SELECT zoom_level, tile_column, tile_row, hash as tile_id - FROM ({select_from})"), - format!("INSERT OR REPLACE INTO images SELECT tile_data, hash FROM ({select_from})"), - ]) - }; - - for statement in insert_sql { - rusqlite_conn.execute(&format!("{statement} WHERE tile_data NOTNULL"), ())?; - } - - rusqlite_conn.execute( - &format!( - "DELETE FROM {main_table} - WHERE (zoom_level, tile_column, tile_row) IN ( - SELECT zoom_level, tile_column, tile_row FROM ({select_from} WHERE tile_data ISNULL) - )" - ), - (), - )?; - - Ok(()) -} - -pub async fn copy_mbtiles_file(opts: TileCopierOptions) -> MbtResult { - TileCopier::new(opts)?.run().await -} - -#[cfg(test)] -mod tests { - use sqlx::{Decode, Sqlite, SqliteConnection, Type}; - - use super::*; - - async fn get_one(conn: &mut SqliteConnection, sql: &str) -> T - where - for<'r> T: Decode<'r, Sqlite> + Type, - { - query(sql).fetch_one(conn).await.unwrap().get::(0) - } - - async fn verify_copy_all( - 
src_filepath: PathBuf, - dst_filepath: PathBuf, - dst_type: Option, - expected_dst_type: MbtType, - ) { - let mut dst_conn = copy_mbtiles_file( - TileCopierOptions::new(src_filepath.clone(), dst_filepath.clone()).dst_type(dst_type), - ) - .await - .unwrap(); - - query("ATTACH DATABASE ? AS srcDb") - .bind(src_filepath.clone().to_str().unwrap()) - .execute(&mut dst_conn) - .await - .unwrap(); - - assert_eq!( - open_and_detect_type(&Mbtiles::new(dst_filepath).unwrap()) - .await - .unwrap(), - expected_dst_type - ); - - assert!( - query("SELECT * FROM srcDb.tiles EXCEPT SELECT * FROM tiles") - .fetch_optional(&mut dst_conn) - .await - .unwrap() - .is_none() - ) - } - - async fn verify_copy_with_zoom_filter(opts: TileCopierOptions, expected_zoom_levels: u8) { - let mut dst_conn = copy_mbtiles_file(opts).await.unwrap(); - - assert_eq!( - get_one::( - &mut dst_conn, - "SELECT COUNT(DISTINCT zoom_level) FROM tiles;" - ) - .await, - expected_zoom_levels - ); - } - - #[actix_rt::test] - async fn copy_flat_tables() { - let src = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let dst = PathBuf::from("file:copy_flat_tables_mem_db?mode=memory&cache=shared"); - verify_copy_all(src, dst, None, Flat).await; - } - - #[actix_rt::test] - async fn copy_flat_from_flat_with_hash_tables() { - let src = PathBuf::from("../tests/fixtures/files/zoomed_world_cities.mbtiles"); - let dst = PathBuf::from( - "file:copy_flat_from_flat_with_hash_tables_mem_db?mode=memory&cache=shared", - ); - verify_copy_all(src, dst, Some(Flat), Flat).await; - } - - #[actix_rt::test] - async fn copy_flat_from_normalized_tables() { - let src = PathBuf::from("../tests/fixtures/files/geography-class-png.mbtiles"); - let dst = - PathBuf::from("file:copy_flat_from_normalized_tables_mem_db?mode=memory&cache=shared"); - verify_copy_all(src, dst, Some(Flat), Flat).await; - } - - #[actix_rt::test] - async fn copy_flat_with_hash_tables() { - let src = 
PathBuf::from("../tests/fixtures/files/zoomed_world_cities.mbtiles"); - let dst = PathBuf::from("file:copy_flat_with_hash_tables_mem_db?mode=memory&cache=shared"); - verify_copy_all(src, dst, None, FlatWithHash).await; - } - - #[actix_rt::test] - async fn copy_flat_with_hash_from_flat_tables() { - let src = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let dst = PathBuf::from( - "file:copy_flat_with_hash_from_flat_tables_mem_db?mode=memory&cache=shared", - ); - verify_copy_all(src, dst, Some(FlatWithHash), FlatWithHash).await; - } - - #[actix_rt::test] - async fn copy_flat_with_hash_from_normalized_tables() { - let src = PathBuf::from("../tests/fixtures/files/geography-class-png.mbtiles"); - let dst = PathBuf::from( - "file:copy_flat_with_hash_from_normalized_tables_mem_db?mode=memory&cache=shared", - ); - verify_copy_all(src, dst, Some(FlatWithHash), FlatWithHash).await; - } - - #[actix_rt::test] - async fn copy_normalized_tables() { - let src = PathBuf::from("../tests/fixtures/files/geography-class-png.mbtiles"); - let dst = PathBuf::from("file:copy_normalized_tables_mem_db?mode=memory&cache=shared"); - verify_copy_all(src, dst, None, Normalized).await; - } - - #[actix_rt::test] - async fn copy_normalized_from_flat_tables() { - let src = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let dst = - PathBuf::from("file:copy_normalized_from_flat_tables_mem_db?mode=memory&cache=shared"); - verify_copy_all(src, dst, Some(Normalized), Normalized).await; - } - - #[actix_rt::test] - async fn copy_normalized_from_flat_with_hash_tables() { - let src = PathBuf::from("../tests/fixtures/files/zoomed_world_cities.mbtiles"); - let dst = PathBuf::from( - "file:copy_normalized_from_flat_with_hash_tables_mem_db?mode=memory&cache=shared", - ); - verify_copy_all(src, dst, Some(Normalized), Normalized).await; - } - - #[actix_rt::test] - async fn copy_with_min_max_zoom() { - let src = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - 
let dst = PathBuf::from("file:copy_with_min_max_zoom_mem_db?mode=memory&cache=shared"); - let opt = TileCopierOptions::new(src, dst) - .min_zoom(Some(2)) - .max_zoom(Some(4)); - verify_copy_with_zoom_filter(opt, 3).await; - } - - #[actix_rt::test] - async fn copy_with_zoom_levels() { - let src = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let dst = PathBuf::from("file:copy_with_zoom_levels_mem_db?mode=memory&cache=shared"); - let opt = TileCopierOptions::new(src, dst) - .min_zoom(Some(2)) - .max_zoom(Some(4)) - .zoom_levels(vec![1, 6]); - verify_copy_with_zoom_filter(opt, 2).await; - } - - #[actix_rt::test] - async fn copy_with_diff_with_file() { - let src = PathBuf::from("../tests/fixtures/files/geography-class-jpg.mbtiles"); - let dst = PathBuf::from("file:copy_with_diff_with_file_mem_db?mode=memory&cache=shared"); - - let diff_file = - PathBuf::from("../tests/fixtures/files/geography-class-jpg-modified.mbtiles"); - - let copy_opts = - TileCopierOptions::new(src.clone(), dst.clone()).diff_with_file(diff_file.clone()); - - let mut dst_conn = copy_mbtiles_file(copy_opts).await.unwrap(); - - assert!(query("SELECT 1 FROM sqlite_schema WHERE name = 'tiles';") - .fetch_optional(&mut dst_conn) - .await - .unwrap() - .is_some()); - - assert_eq!( - get_one::(&mut dst_conn, "SELECT COUNT(*) FROM map;").await, - 3 - ); - - assert!(get_one::>( - &mut dst_conn, - "SELECT * FROM tiles WHERE zoom_level = 2 AND tile_row = 2 AND tile_column = 2;" - ) - .await - .is_some()); - - assert!(get_one::>( - &mut dst_conn, - "SELECT * FROM tiles WHERE zoom_level = 1 AND tile_row = 1 AND tile_column = 1;" - ) - .await - .is_some()); - - assert!(get_one::>( - &mut dst_conn, - "SELECT tile_id FROM map WHERE zoom_level = 0 AND tile_row = 0 AND tile_column = 0;" - ) - .await - .is_none()); - } - - #[actix_rt::test] - async fn ignore_dst_type_when_copy_to_existing() { - let src_file = PathBuf::from("../tests/fixtures/files/world_cities_modified.mbtiles"); - - // Copy the 
dst file to an in-memory DB - let dst_file = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let dst = PathBuf::from( - "file:ignore_dst_type_when_copy_to_existing_mem_db?mode=memory&cache=shared", - ); - - let _dst_conn = copy_mbtiles_file(TileCopierOptions::new(dst_file.clone(), dst.clone())) - .await - .unwrap(); - - verify_copy_all(src_file, dst, Some(Normalized), Flat).await; - } - - #[actix_rt::test] - async fn copy_to_existing_abort_mode() { - let src = PathBuf::from("../tests/fixtures/files/world_cities_modified.mbtiles"); - let dst = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - - let copy_opts = - TileCopierOptions::new(src.clone(), dst.clone()).on_duplicate(CopyDuplicateMode::Abort); - - assert!(matches!( - copy_mbtiles_file(copy_opts).await.unwrap_err(), - MbtError::RusqliteError(..) - )); - } - - #[actix_rt::test] - async fn copy_to_existing_override_mode() { - let src_file = PathBuf::from("../tests/fixtures/files/world_cities_modified.mbtiles"); - - // Copy the dst file to an in-memory DB - let dst_file = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let dst = - PathBuf::from("file:copy_to_existing_override_mode_mem_db?mode=memory&cache=shared"); - - let _dst_conn = copy_mbtiles_file(TileCopierOptions::new(dst_file.clone(), dst.clone())) - .await - .unwrap(); - - let mut dst_conn = copy_mbtiles_file(TileCopierOptions::new(src_file.clone(), dst.clone())) - .await - .unwrap(); - - // Verify the tiles in the destination file is a superset of the tiles in the source file - query("ATTACH DATABASE ? 
AS otherDb") - .bind(src_file.clone().to_str().unwrap()) - .execute(&mut dst_conn) - .await - .unwrap(); - - assert!( - query("SELECT * FROM otherDb.tiles EXCEPT SELECT * FROM tiles;") - .fetch_optional(&mut dst_conn) - .await - .unwrap() - .is_none() - ); - } - - #[actix_rt::test] - async fn copy_to_existing_ignore_mode() { - let src_file = PathBuf::from("../tests/fixtures/files/world_cities_modified.mbtiles"); - - // Copy the dst file to an in-memory DB - let dst_file = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let dst = - PathBuf::from("file:copy_to_existing_ignore_mode_mem_db?mode=memory&cache=shared"); - - let _dst_conn = copy_mbtiles_file(TileCopierOptions::new(dst_file.clone(), dst.clone())) - .await - .unwrap(); - - let mut dst_conn = copy_mbtiles_file( - TileCopierOptions::new(src_file.clone(), dst.clone()) - .on_duplicate(CopyDuplicateMode::Ignore), - ) - .await - .unwrap(); - - // Verify the tiles in the destination file are the same as those in the source file except for those with duplicate (zoom_level, tile_column, tile_row) - query("ATTACH DATABASE ? AS srcDb") - .bind(src_file.clone().to_str().unwrap()) - .execute(&mut dst_conn) - .await - .unwrap(); - query("ATTACH DATABASE ? 
AS originalDb") - .bind(dst_file.clone().to_str().unwrap()) - .execute(&mut dst_conn) - .await - .unwrap(); - // Create a temporary table with all the tiles in the original database and - // all the tiles in the source database except for those that conflict with tiles in the original database - query("CREATE TEMP TABLE expected_tiles AS - SELECT COALESCE(t1.zoom_level, t2.zoom_level) as zoom_level, - COALESCE(t1.tile_column, t2.zoom_level) as tile_column, - COALESCE(t1.tile_row, t2.tile_row) as tile_row, - COALESCE(t1.tile_data, t2.tile_data) as tile_data - FROM originalDb.tiles as t1 - FULL OUTER JOIN srcDb.tiles as t2 - ON t1.zoom_level = t2.zoom_level AND t1.tile_column = t2.tile_column AND t1.tile_row = t2.tile_row") - .execute(&mut dst_conn) - .await - .unwrap(); - - // Ensure all entries in expected_tiles are in tiles and vice versa - assert!(query( - "SELECT * FROM expected_tiles EXCEPT SELECT * FROM tiles - UNION - SELECT * FROM tiles EXCEPT SELECT * FROM expected_tiles" - ) - .fetch_optional(&mut dst_conn) - .await - .unwrap() - .is_none()); - } - - #[actix_rt::test] - async fn apply_flat_diff_file() { - // Copy the src file to an in-memory DB - let src_file = PathBuf::from("../tests/fixtures/files/world_cities.mbtiles"); - let src = PathBuf::from("file:apply_flat_diff_file_mem_db?mode=memory&cache=shared"); - - let mut src_conn = copy_mbtiles_file(TileCopierOptions::new(src_file.clone(), src.clone())) - .await - .unwrap(); - - // Apply diff to the src data in in-memory DB - let diff_file = PathBuf::from("../tests/fixtures/files/world_cities_diff.mbtiles"); - apply_mbtiles_diff(src, diff_file).await.unwrap(); - - // Verify the data is the same as the file the diff was generated from - let path = "../tests/fixtures/files/world_cities_modified.mbtiles"; - query!("ATTACH DATABASE ? 
AS otherDb", path) - .execute(&mut src_conn) - .await - .unwrap(); - - assert!( - query("SELECT * FROM tiles EXCEPT SELECT * FROM otherDb.tiles;") - .fetch_optional(&mut src_conn) - .await - .unwrap() - .is_none() - ); - } - - #[actix_rt::test] - async fn apply_normalized_diff_file() { - // Copy the src file to an in-memory DB - let src_file = PathBuf::from("../tests/fixtures/files/geography-class-jpg.mbtiles"); - let src = PathBuf::from("file:apply_normalized_diff_file_mem_db?mode=memory&cache=shared"); - - let mut src_conn = copy_mbtiles_file(TileCopierOptions::new(src_file.clone(), src.clone())) - .await - .unwrap(); - - // Apply diff to the src data in in-memory DB - let diff_file = PathBuf::from("../tests/fixtures/files/geography-class-jpg-diff.mbtiles"); - apply_mbtiles_diff(src, diff_file).await.unwrap(); - - // Verify the data is the same as the file the diff was generated from - let path = "../tests/fixtures/files/geography-class-jpg-modified.mbtiles"; - query!("ATTACH DATABASE ? AS otherDb", path) - .execute(&mut src_conn) - .await - .unwrap(); - - assert!( - query("SELECT * FROM tiles EXCEPT SELECT * FROM otherDb.tiles;") - .fetch_optional(&mut src_conn) - .await - .unwrap() - .is_none() - ); - } -} diff --git a/martin-tile-utils/Cargo.toml b/martin-tile-utils/Cargo.toml index a1fdd0d6d..4d60a9d38 100644 --- a/martin-tile-utils/Cargo.toml +++ b/martin-tile-utils/Cargo.toml @@ -1,9 +1,10 @@ [package] name = "martin-tile-utils" -version = "0.1.2" +version = "0.1.4" authors = ["Yuri Astrakhan ", "MapLibre contributors"] description = "Utilites to help with map tile processing, such as type and compression detection. Used by the MapLibre's Martin tile server." keywords = ["maps", "tiles", "mvt", "tileserver"] +categories = ["science::geo", "parsing"] exclude = [ # Exclude the fixtures directory from the package - it's only used for tests. 
"/fixtures", diff --git a/martin-tile-utils/README.md b/martin-tile-utils/README.md index 813c13ec4..ab5520971 100644 --- a/martin-tile-utils/README.md +++ b/martin-tile-utils/README.md @@ -4,6 +4,21 @@ [![Slack chat](https://img.shields.io/badge/Chat-on%20Slack-blueviolet)](https://slack.openstreetmap.us/) [![GitHub](https://img.shields.io/badge/github-maplibre/martin-8da0cb?logo=github)](https://github.com/maplibre/martin) [![crates.io version](https://img.shields.io/crates/v/martin-tile-utils.svg)](https://crates.io/crates/martin-tile-utils) -[![CI build](https://github.com/maplibre/martin/workflows/CI/badge.svg)](https://github.com/maplibre/martin-tile-utils/actions) +[![CI build](https://github.com/maplibre/martin/actions/workflows/ci.yml/badge.svg)](https://github.com/maplibre/martin-tile-utils/actions) A library to help tile servers like [Martin](https://maplibre.org/martin) work with tile content. + +## License + +Licensed under either of + +* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or ) +* MIT license ([LICENSE-MIT](LICENSE-MIT) or ) + at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally +submitted for inclusion in the work by you, as defined in the +Apache-2.0 license, shall be dual licensed as above, without any +additional terms or conditions. 
diff --git a/martin-tile-utils/src/lib.rs b/martin-tile-utils/src/lib.rs index 5ff13b003..ceb8a5ab2 100644 --- a/martin-tile-utils/src/lib.rs +++ b/martin-tile-utils/src/lib.rs @@ -1,3 +1,5 @@ +#![doc = include_str!("../README.md")] + // This code was partially adapted from https://github.com/maplibre/mbtileserver-rs // project originally written by Kaveh Karimi and licensed under MIT/Apache-2.0 diff --git a/martin/Cargo.toml b/martin/Cargo.toml index 31bd47f4b..16998806a 100644 --- a/martin/Cargo.toml +++ b/martin/Cargo.toml @@ -1,11 +1,11 @@ [package] name = "martin" -# Make sure to update /home/nyurik/dev/rust/martin/homebrew-formula/martin.rb version -# Once the release is published with the hash -version = "0.9.0-pre.1" +# Once the release is published with the hash, update https://github.com/maplibre/homebrew-martin +version = "0.10.1" authors = ["Stepan Kuzmin ", "Yuri Astrakhan ", "MapLibre contributors"] description = "Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support" keywords = ["maps", "tiles", "mbtiles", "pmtiles", "postgis"] +categories = ["science::geo", "web-programming::http-server"] exclude = [ # Tests include a lot of data and other test files that are not needed for the users of the library "/tests", @@ -15,7 +15,7 @@ edition.workspace = true license.workspace = true repository.workspace = true rust-version.workspace = true -readme.workspace = true +readme = "README.md" homepage.workspace = true [package.metadata.deb] @@ -24,7 +24,6 @@ revision = "" maintainer = "Yuri Astrakhan , Stepan Kuzmin , MapLibre contributors" maintainer-scripts = "../debian" depends = "$auto" -features = ["ssl"] assets = [ ["target/release/martin", "/usr/bin/martin", "755"], ["target/release/mbtiles", "/usr/bin/mbtiles", "755"], @@ -50,8 +49,6 @@ path = "src/bin/main.rs" [features] default = [] -vendored-openssl = ["ssl", "openssl?/vendored"] -ssl = ["dep:openssl", "dep:postgres-openssl"] bless-tests = [] [dependencies] @@ -59,25 +56,29 
@@ actix-cors.workspace = true actix-http.workspace = true actix-rt.workspace = true actix-web.workspace = true -actix.workspace = true async-trait.workspace = true +bit-set.workspace = true brotli.workspace = true clap.workspace = true deadpool-postgres.workspace = true env_logger.workspace = true -json-patch.workspace = true flate2.workspace = true futures.workspace = true itertools.workspace = true +json-patch.workspace = true log.workspace = true -martin-mbtiles.workspace = true martin-tile-utils.workspace = true +mbtiles.workspace = true num_cpus.workspace = true +pbf_font_tools.workspace = true pmtiles.workspace = true postgis.workspace = true postgres-protocol.workspace = true postgres.workspace = true regex.workspace = true +rustls-native-certs.workspace = true +rustls-pemfile.workspace = true +rustls.workspace = true semver.workspace = true serde.workspace = true serde_json = { workspace = true, features = ["preserve_order"] } @@ -87,16 +88,14 @@ subst.workspace = true thiserror.workspace = true tilejson.workspace = true tokio = { workspace = true, features = ["io-std"] } - -# Optional dependencies for ssl support -openssl = { workspace = true, optional = true } -postgres-openssl = { workspace = true, optional = true } +tokio-postgres-rustls.workspace = true [dev-dependencies] cargo-husky.workspace = true criterion.workspace = true ctor.workspace = true indoc.workspace = true +insta = { workspace = true, features = ["yaml"] } #test-log = "0.2" [[bench]] diff --git a/martin/src/args/mod.rs b/martin/src/args/mod.rs index 6866d87ba..daa8a6567 100644 --- a/martin/src/args/mod.rs +++ b/martin/src/args/mod.rs @@ -6,4 +6,5 @@ mod srv; pub use connections::{Arguments, State}; pub use environment::{Env, OsEnv}; -pub use root::Args; +pub use pg::{BoundsCalcType, DEFAULT_BOUNDS_TIMEOUT}; +pub use root::{Args, MetaArgs}; diff --git a/martin/src/args/pg.rs b/martin/src/args/pg.rs index 88c9a1f86..2cf18c711 100644 --- a/martin/src/args/pg.rs +++ b/martin/src/args/pg.rs 
@@ -1,19 +1,37 @@ +use std::time::Duration; + +use clap::ValueEnum; use log::{info, warn}; +use serde::{Deserialize, Serialize}; use crate::args::connections::Arguments; use crate::args::connections::State::{Ignore, Take}; use crate::args::environment::Env; use crate::pg::{PgConfig, PgSslCerts, POOL_SIZE_DEFAULT}; -use crate::utils::OneOrMany; +use crate::utils::{OptBoolObj, OptOneMany}; + +// Must match the help string for BoundsType::Quick +pub const DEFAULT_BOUNDS_TIMEOUT: Duration = Duration::from_secs(5); + +#[derive(PartialEq, Eq, Default, Debug, Clone, Copy, Serialize, Deserialize, ValueEnum)] +#[serde(rename_all = "lowercase")] +pub enum BoundsCalcType { + /// Compute table geometry bounds, but abort if it takes longer than 5 seconds. + #[default] + Quick, + /// Compute table geometry bounds. The startup time may be significant. Make sure all GEO columns have indexes. + Calc, + /// Skip bounds calculation. The bounds will be set to the whole world. + Skip, +} #[derive(clap::Args, Debug, PartialEq, Default)] #[command(about, version)] pub struct PgArgs { - /// Disable the automatic generation of bounds for spatial PG tables. + /// Specify how bounds should be computed for the spatial PG tables. [DEFAULT: quick] #[arg(short = 'b', long)] - pub disable_bounds: bool, + pub auto_bounds: Option, /// Loads trusted root certificates from a file. The file should contain a sequence of PEM-formatted CA certificates. - #[cfg(feature = "ssl")] #[arg(long)] pub ca_root_file: Option, /// If a spatial PG table has SRID 0, then this default SRID will be used as a fallback. 
@@ -31,7 +49,7 @@ impl PgArgs { self, cli_strings: &mut Arguments, env: &impl Env<'a>, - ) -> Option> { + ) -> OptOneMany { let connections = Self::extract_conn_strings(cli_strings, env); let default_srid = self.get_default_srid(env); let certs = self.get_certs(env); @@ -42,27 +60,23 @@ impl PgArgs { connection_string: Some(s), ssl_certificates: certs.clone(), default_srid, - disable_bounds: if self.disable_bounds { - Some(true) - } else { - None - }, + auto_bounds: self.auto_bounds, max_feature_count: self.max_feature_count, pool_size: self.pool_size, - auto_publish: None, + auto_publish: OptBoolObj::NoValue, tables: None, functions: None, }) .collect(); match results.len() { - 0 => None, - 1 => Some(OneOrMany::One(results.into_iter().next().unwrap())), - _ => Some(OneOrMany::Many(results)), + 0 => OptOneMany::NoVals, + 1 => OptOneMany::One(results.into_iter().next().unwrap()), + _ => OptOneMany::Many(results), } } - pub fn override_config<'a>(self, pg_config: &mut OneOrMany, env: &impl Env<'a>) { + pub fn override_config<'a>(self, pg_config: &mut OptOneMany, env: &impl Env<'a>) { if self.default_srid.is_some() { info!("Overriding configured default SRID to {} on all Postgres connections because of a CLI parameter", self.default_srid.unwrap()); pg_config.iter_mut().for_each(|c| { @@ -82,7 +96,6 @@ impl PgArgs { }); } - #[cfg(feature = "ssl")] if self.ca_root_file.is_some() { info!("Overriding root certificate file to {} on all Postgres connections because of a CLI parameter", self.ca_root_file.as_ref().unwrap().display()); @@ -145,13 +158,6 @@ impl PgArgs { }) } - #[cfg(not(feature = "ssl"))] - #[allow(clippy::unused_self)] - fn get_certs<'a>(&self, _env: &impl Env<'a>) -> PgSslCerts { - PgSslCerts {} - } - - #[cfg(feature = "ssl")] fn get_certs<'a>(&self, env: &impl Env<'a>) -> PgSslCerts { let mut result = PgSslCerts { ssl_cert: Self::parse_env_var(env, "PGSSLCERT", "ssl certificate"), @@ -172,7 +178,6 @@ impl PgArgs { result } - #[cfg(feature = "ssl")] fn 
parse_env_var<'a>( env: &impl Env<'a>, env_var: &str, @@ -194,7 +199,6 @@ fn is_postgresql_string(s: &str) -> bool { #[cfg(test)] mod tests { - #[cfg(feature = "ssl")] use std::path::PathBuf; use super::*; @@ -235,10 +239,10 @@ mod tests { let config = PgArgs::default().into_config(&mut args, &FauxEnv::default()); assert_eq!( config, - Some(OneOrMany::One(PgConfig { + OptOneMany::One(PgConfig { connection_string: some("postgres://localhost:5432"), ..Default::default() - })) + }) ); assert!(args.check().is_ok()); } @@ -259,16 +263,15 @@ mod tests { let config = PgArgs::default().into_config(&mut args, &env); assert_eq!( config, - Some(OneOrMany::One(PgConfig { + OptOneMany::One(PgConfig { connection_string: some("postgres://localhost:5432"), default_srid: Some(10), - #[cfg(feature = "ssl")] ssl_certificates: PgSslCerts { ssl_root_cert: Some(PathBuf::from("file")), ..Default::default() }, ..Default::default() - })) + }) ); assert!(args.check().is_ok()); } @@ -294,17 +297,16 @@ mod tests { let config = pg_args.into_config(&mut args, &env); assert_eq!( config, - Some(OneOrMany::One(PgConfig { + OptOneMany::One(PgConfig { connection_string: some("postgres://localhost:5432"), default_srid: Some(20), - #[cfg(feature = "ssl")] ssl_certificates: PgSslCerts { ssl_cert: Some(PathBuf::from("cert")), ssl_key: Some(PathBuf::from("key")), ssl_root_cert: Some(PathBuf::from("root")), }, ..Default::default() - })) + }) ); assert!(args.check().is_ok()); } diff --git a/martin/src/args/root.rs b/martin/src/args/root.rs index 1201279b9..fe15d2494 100644 --- a/martin/src/args/root.rs +++ b/martin/src/args/root.rs @@ -10,7 +10,7 @@ use crate::args::srv::SrvArgs; use crate::args::State::{Ignore, Share, Take}; use crate::config::Config; use crate::file_config::FileConfigEnum; -use crate::{Error, Result}; +use crate::{Error, OptOneMany, Result}; #[derive(Parser, Debug, PartialEq, Default)] #[command(about, version)] @@ -36,7 +36,7 @@ pub struct MetaArgs { /// By default, only print if 
sources are auto-detected. #[arg(long)] pub save_config: Option, - /// [Deprecated] Scan for new sources on sources list requests + /// **Deprecated** Scan for new sources on sources list requests #[arg(short, long, hide = true)] pub watch: bool, /// Connection strings, e.g. postgres://... or /path/to/files @@ -44,6 +44,9 @@ pub struct MetaArgs { /// Export a directory with SVG files as a sprite source. Can be specified multiple times. #[arg(short, long)] pub sprite: Vec, + /// Export a font file or a directory with font files as a font source (recursive). Can be specified multiple times. + #[arg(short, long)] + pub font: Vec, } impl Args { @@ -55,18 +58,18 @@ impl Args { warn!("The WATCH_MODE env variable is no longer supported, and will be ignored"); } if self.meta.config.is_some() && !self.meta.connection.is_empty() { - return Err(Error::ConfigAndConnectionsError); + return Err(Error::ConfigAndConnectionsError(self.meta.connection)); } self.srv.merge_into_config(&mut config.srv); let mut cli_strings = Arguments::new(self.meta.connection); let pg_args = self.pg.unwrap_or_default(); - if let Some(pg_config) = &mut config.postgres { - // config was loaded from a file, we can only apply a few CLI overrides to it - pg_args.override_config(pg_config, env); - } else { + if config.postgres.is_none() { config.postgres = pg_args.into_config(&mut cli_strings, env); + } else { + // config was loaded from a file, we can only apply a few CLI overrides to it + pg_args.override_config(&mut config.postgres, env); } if !cli_strings.is_empty() { @@ -81,11 +84,15 @@ impl Args { config.sprites = FileConfigEnum::new(self.meta.sprite); } + if !self.meta.font.is_empty() { + config.fonts = OptOneMany::new(self.meta.font); + } + cli_strings.check() } } -pub fn parse_file_args(cli_strings: &mut Arguments, extension: &str) -> Option { +pub fn parse_file_args(cli_strings: &mut Arguments, extension: &str) -> FileConfigEnum { let paths = cli_strings.process(|v| match PathBuf::try_from(v) { 
Ok(v) => { if v.is_dir() { @@ -107,7 +114,7 @@ mod tests { use super::*; use crate::pg::PgConfig; use crate::test_utils::{some, FauxEnv}; - use crate::utils::OneOrMany; + use crate::utils::OptOneMany; fn parse(args: &[&str]) -> Result<(Config, MetaArgs)> { let args = Args::parse_from(args); @@ -143,10 +150,10 @@ mod tests { let args = parse(&["martin", "postgres://connection"]).unwrap(); let cfg = Config { - postgres: Some(OneOrMany::One(PgConfig { + postgres: OptOneMany::One(PgConfig { connection_string: some("postgres://connection"), ..Default::default() - })), + }), ..Default::default() }; let meta = MetaArgs { @@ -174,7 +181,7 @@ mod tests { let env = FauxEnv::default(); let mut config = Config::default(); let err = args.merge_into_config(&mut config, &env).unwrap_err(); - assert!(matches!(err, crate::Error::ConfigAndConnectionsError)); + assert!(matches!(err, crate::Error::ConfigAndConnectionsError(..))); } #[test] diff --git a/martin/src/bin/main.rs b/martin/src/bin/main.rs index c97ac21f4..6501c098a 100644 --- a/martin/src/bin/main.rs +++ b/martin/src/bin/main.rs @@ -58,7 +58,7 @@ async fn start(args: Args) -> Result { #[actix_web::main] async fn main() { - let env = env_logger::Env::default().filter_or(env_logger::DEFAULT_FILTER_ENV, "martin=info"); + let env = env_logger::Env::default().default_filter_or("martin=info"); env_logger::Builder::from_env(env).init(); start(Args::parse()) diff --git a/martin/src/config.rs b/martin/src/config.rs index b477fd6f4..625485e01 100644 --- a/martin/src/config.rs +++ b/martin/src/config.rs @@ -2,7 +2,7 @@ use std::collections::HashMap; use std::fs::File; use std::future::Future; use std::io::prelude::*; -use std::path::Path; +use std::path::{Path, PathBuf}; use std::pin::Pin; use futures::future::try_join_all; @@ -10,20 +10,22 @@ use serde::{Deserialize, Serialize}; use subst::VariableMap; use crate::file_config::{resolve_files, FileConfigEnum}; +use crate::fonts::FontSources; use crate::mbtiles::MbtSource; use 
crate::pg::PgConfig; use crate::pmtiles::PmtSource; -use crate::source::Sources; -use crate::sprites::{resolve_sprites, SpriteSources}; +use crate::source::{TileInfoSources, TileSources}; +use crate::sprites::SpriteSources; use crate::srv::SrvConfig; -use crate::utils::{IdResolver, OneOrMany, Result}; use crate::Error::{ConfigLoadError, ConfigParseError, NoSources}; +use crate::{IdResolver, OptOneMany, Result}; pub type UnrecognizedValues = HashMap; -pub struct AllSources { - pub sources: Sources, +pub struct ServerState { + pub tiles: TileSources, pub sprites: SpriteSources, + pub fonts: FontSources, } #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] @@ -31,17 +33,20 @@ pub struct Config { #[serde(flatten)] pub srv: SrvConfig, - #[serde(skip_serializing_if = "Option::is_none")] - pub postgres: Option>, + #[serde(default, skip_serializing_if = "OptOneMany::is_none")] + pub postgres: OptOneMany, - #[serde(skip_serializing_if = "Option::is_none")] - pub pmtiles: Option, + #[serde(default, skip_serializing_if = "FileConfigEnum::is_none")] + pub pmtiles: FileConfigEnum, - #[serde(skip_serializing_if = "Option::is_none")] - pub mbtiles: Option, + #[serde(default, skip_serializing_if = "FileConfigEnum::is_none")] + pub mbtiles: FileConfigEnum, - #[serde(skip_serializing_if = "Option::is_none")] - pub sprites: Option, + #[serde(default, skip_serializing_if = "FileConfigEnum::is_none")] + pub sprites: FileConfigEnum, + + #[serde(default, skip_serializing_if = "OptOneMany::is_none")] + pub fonts: OptOneMany, #[serde(flatten)] pub unrecognized: UnrecognizedValues, @@ -53,77 +58,57 @@ impl Config { let mut res = UnrecognizedValues::new(); copy_unrecognized_config(&mut res, "", &self.unrecognized); - let mut any = if let Some(pg) = &mut self.postgres { - for pg in pg.iter_mut() { - res.extend(pg.finalize()?); - } - !pg.is_empty() - } else { - false - }; + for pg in self.postgres.iter_mut() { + res.extend(pg.finalize()?); + } - any |= if let Some(cfg) = &mut 
self.pmtiles { - res.extend(cfg.finalize("pmtiles.")?); - !cfg.is_empty() - } else { - false - }; + res.extend(self.pmtiles.finalize("pmtiles.")?); + res.extend(self.mbtiles.finalize("mbtiles.")?); + res.extend(self.sprites.finalize("sprites.")?); - any |= if let Some(cfg) = &mut self.mbtiles { - res.extend(cfg.finalize("mbtiles.")?); - !cfg.is_empty() - } else { - false - }; + // TODO: support for unrecognized fonts? + // res.extend(self.fonts.finalize("fonts.")?); - any |= if let Some(cfg) = &mut self.sprites { - res.extend(cfg.finalize("sprites.")?); - !cfg.is_empty() + if self.postgres.is_empty() + && self.pmtiles.is_empty() + && self.mbtiles.is_empty() + && self.sprites.is_empty() + && self.fonts.is_empty() + { + Err(NoSources) } else { - false - }; - - if any { Ok(res) - } else { - Err(NoSources) } } - pub async fn resolve(&mut self, idr: IdResolver) -> Result { + pub async fn resolve(&mut self, idr: IdResolver) -> Result { + Ok(ServerState { + tiles: self.resolve_tile_sources(idr).await?, + sprites: SpriteSources::resolve(&mut self.sprites)?, + fonts: FontSources::resolve(&mut self.fonts)?, + }) + } + + async fn resolve_tile_sources(&mut self, idr: IdResolver) -> Result { let create_pmt_src = &mut PmtSource::new_box; let create_mbt_src = &mut MbtSource::new_box; + let mut sources: Vec>>>> = Vec::new(); - let mut sources: Vec>>>> = Vec::new(); - if let Some(v) = self.postgres.as_mut() { - for s in v.iter_mut() { - sources.push(Box::pin(s.resolve(idr.clone()))); - } + for s in self.postgres.iter_mut() { + sources.push(Box::pin(s.resolve(idr.clone()))); } - if self.pmtiles.is_some() { + + if !self.pmtiles.is_empty() { let val = resolve_files(&mut self.pmtiles, idr.clone(), "pmtiles", create_pmt_src); sources.push(Box::pin(val)); } - if self.mbtiles.is_some() { + if !self.mbtiles.is_empty() { let val = resolve_files(&mut self.mbtiles, idr.clone(), "mbtiles", create_mbt_src); sources.push(Box::pin(val)); } - // Minor in-efficiency: - // Sources are added to a 
BTreeMap, then iterated over into a sort structure and convert back to a BTreeMap. - // Ideally there should be a vector of values, which is then sorted (in-place?) and converted to a BTreeMap. - Ok(AllSources { - sources: try_join_all(sources) - .await? - .into_iter() - .fold(Sources::default(), |mut acc, hashmap| { - acc.extend(hashmap); - acc - }) - .sort(), - sprites: resolve_sprites(&mut self.sprites)?, - }) + Ok(TileSources::new(try_join_all(sources).await?)) } } diff --git a/martin/src/file_config.rs b/martin/src/file_config.rs index b290356cf..62d8ab30e 100644 --- a/martin/src/file_config.rs +++ b/martin/src/file_config.rs @@ -9,9 +9,9 @@ use serde::{Deserialize, Serialize}; use crate::config::{copy_unrecognized_config, UnrecognizedValues}; use crate::file_config::FileError::{InvalidFilePath, InvalidSourceFilePath, IoError}; -use crate::source::{Source, Sources}; -use crate::utils::{sorted_opt_map, Error, IdResolver, OneOrMany}; -use crate::OneOrMany::{Many, One}; +use crate::source::{Source, TileInfoSources}; +use crate::utils::{sorted_opt_map, Error, IdResolver, OptOneMany}; +use crate::OptOneMany::{Many, One}; #[derive(thiserror::Error, Debug)] pub enum FileError { @@ -31,9 +31,11 @@ pub enum FileError { AquireConnError(String), } -#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] +#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] #[serde(untagged)] pub enum FileConfigEnum { + #[default] + None, Path(PathBuf), Paths(Vec), Config(FileConfig), @@ -41,7 +43,7 @@ pub enum FileConfigEnum { impl FileConfigEnum { #[must_use] - pub fn new(paths: Vec) -> Option { + pub fn new(paths: Vec) -> FileConfigEnum { Self::new_extended(paths, HashMap::new(), UnrecognizedValues::new()) } @@ -50,46 +52,70 @@ impl FileConfigEnum { paths: Vec, configs: HashMap, unrecognized: UnrecognizedValues, - ) -> Option { + ) -> FileConfigEnum { if configs.is_empty() && unrecognized.is_empty() { match paths.len() { - 0 => None, - 1 => 
Some(FileConfigEnum::Path(paths.into_iter().next().unwrap())), - _ => Some(FileConfigEnum::Paths(paths)), + 0 => FileConfigEnum::None, + 1 => FileConfigEnum::Path(paths.into_iter().next().unwrap()), + _ => FileConfigEnum::Paths(paths), } } else { - Some(FileConfigEnum::Config(FileConfig { - paths: OneOrMany::new_opt(paths), + FileConfigEnum::Config(FileConfig { + paths: OptOneMany::new(paths), sources: if configs.is_empty() { None } else { Some(configs) }, unrecognized, - })) + }) + } + } + + #[must_use] + pub fn is_none(&self) -> bool { + matches!(self, Self::None) + } + + #[must_use] + pub fn is_empty(&self) -> bool { + match self { + Self::None => true, + Self::Path(_) => false, + Self::Paths(v) => v.is_empty(), + Self::Config(c) => c.is_empty(), } } - pub fn extract_file_config(&mut self) -> FileConfig { + pub fn extract_file_config(&mut self) -> Option { match self { - FileConfigEnum::Path(path) => FileConfig { - paths: Some(One(mem::take(path))), + FileConfigEnum::None => None, + FileConfigEnum::Path(path) => Some(FileConfig { + paths: One(mem::take(path)), ..FileConfig::default() - }, - FileConfigEnum::Paths(paths) => FileConfig { - paths: Some(Many(mem::take(paths))), + }), + FileConfigEnum::Paths(paths) => Some(FileConfig { + paths: Many(mem::take(paths)), ..Default::default() - }, - FileConfigEnum::Config(cfg) => mem::take(cfg), + }), + FileConfigEnum::Config(cfg) => Some(mem::take(cfg)), } } + + pub fn finalize(&self, prefix: &str) -> Result { + let mut res = UnrecognizedValues::new(); + if let Self::Config(cfg) = self { + copy_unrecognized_config(&mut res, prefix, &cfg.unrecognized); + } + Ok(res) + } } #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] pub struct FileConfig { /// A list of file paths - #[serde(skip_serializing_if = "Option::is_none")] - pub paths: Option>, + #[serde(default, skip_serializing_if = "OptOneMany::is_none")] + pub paths: OptOneMany, /// A map of source IDs to file paths or config objects 
#[serde(skip_serializing_if = "Option::is_none")] #[serde(serialize_with = "sorted_opt_map")] @@ -128,31 +154,12 @@ pub struct FileConfigSource { pub path: PathBuf, } -impl FileConfigEnum { - pub fn finalize(&self, prefix: &str) -> Result { - let mut res = UnrecognizedValues::new(); - if let Self::Config(cfg) = self { - copy_unrecognized_config(&mut res, prefix, &cfg.unrecognized); - } - Ok(res) - } - - #[must_use] - pub fn is_empty(&self) -> bool { - match self { - Self::Path(_) => false, - Self::Paths(v) => v.is_empty(), - Self::Config(c) => c.is_empty(), - } - } -} - pub async fn resolve_files( - config: &mut Option, + config: &mut FileConfigEnum, idr: IdResolver, extension: &str, create_source: &mut impl FnMut(String, PathBuf) -> Fut, -) -> Result +) -> Result where Fut: Future, FileError>>, { @@ -162,20 +169,19 @@ where } async fn resolve_int( - config: &mut Option, + config: &mut FileConfigEnum, idr: IdResolver, extension: &str, create_source: &mut impl FnMut(String, PathBuf) -> Fut, -) -> Result +) -> Result where Fut: Future, FileError>>, { - let Some(cfg) = config else { - return Ok(Sources::default()); + let Some(cfg) = config.extract_file_config() else { + return Ok(TileInfoSources::default()); }; - let cfg = cfg.extract_file_config(); - let mut results = Sources::default(); + let mut results = TileInfoSources::default(); let mut configs = HashMap::new(); let mut files = HashSet::new(); let mut directories = Vec::new(); @@ -198,54 +204,51 @@ where FileConfigSrc::Obj(pmt) => pmt.path, FileConfigSrc::Path(path) => path, }; - results.insert(id.clone(), create_source(id, path).await?); + results.push(create_source(id, path).await?); } } - if let Some(paths) = cfg.paths { - for path in paths { - let is_dir = path.is_dir(); - let dir_files = if is_dir { - // directories will be kept in the config just in case there are new files - directories.push(path.clone()); - path.read_dir() - .map_err(|e| IoError(e, path.clone()))? 
- .filter_map(Result::ok) - .filter(|f| { - f.path().extension().filter(|e| *e == extension).is_some() - && f.path().is_file() - }) - .map(|f| f.path()) - .collect() - } else if path.is_file() { - vec![path] - } else { - return Err(InvalidFilePath(path.canonicalize().unwrap_or(path))); - }; - for path in dir_files { - let can = path.canonicalize().map_err(|e| IoError(e, path.clone()))?; - if files.contains(&can) { - if !is_dir { - warn!("Ignoring duplicate MBTiles path: {}", can.display()); - } - continue; + for path in cfg.paths { + let is_dir = path.is_dir(); + let dir_files = if is_dir { + // directories will be kept in the config just in case there are new files + directories.push(path.clone()); + path.read_dir() + .map_err(|e| IoError(e, path.clone()))? + .filter_map(Result::ok) + .filter(|f| { + f.path().extension().filter(|e| *e == extension).is_some() && f.path().is_file() + }) + .map(|f| f.path()) + .collect() + } else if path.is_file() { + vec![path] + } else { + return Err(InvalidFilePath(path.canonicalize().unwrap_or(path))); + }; + for path in dir_files { + let can = path.canonicalize().map_err(|e| IoError(e, path.clone()))?; + if files.contains(&can) { + if !is_dir { + warn!("Ignoring duplicate MBTiles path: {}", can.display()); } - let id = path.file_stem().map_or_else( - || "_unknown".to_string(), - |s| s.to_string_lossy().to_string(), - ); - let source = FileConfigSrc::Path(path); - let id = idr.resolve(&id, can.to_string_lossy().to_string()); - info!("Configured source {id} from {}", can.display()); - files.insert(can); - configs.insert(id.clone(), source.clone()); - - let path = match source { - FileConfigSrc::Obj(pmt) => pmt.path, - FileConfigSrc::Path(path) => path, - }; - results.insert(id.clone(), create_source(id, path).await?); + continue; } + let id = path.file_stem().map_or_else( + || "_unknown".to_string(), + |s| s.to_string_lossy().to_string(), + ); + let source = FileConfigSrc::Path(path); + let id = idr.resolve(&id, 
can.to_string_lossy().to_string()); + info!("Configured source {id} from {}", can.display()); + files.insert(can); + configs.insert(id.clone(), source.clone()); + + let path = match source { + FileConfigSrc::Obj(pmt) => pmt.path, + FileConfigSrc::Path(path) => path, + }; + results.push(create_source(id, path).await?); } } @@ -280,7 +283,7 @@ mod tests { let FileConfigEnum::Config(cfg) = cfg else { panic!(); }; - let paths = cfg.paths.clone().unwrap().into_iter().collect::>(); + let paths = cfg.paths.clone().into_iter().collect::>(); assert_eq!( paths, vec![ diff --git a/martin/src/fonts/mod.rs b/martin/src/fonts/mod.rs new file mode 100644 index 000000000..1edf21016 --- /dev/null +++ b/martin/src/fonts/mod.rs @@ -0,0 +1,357 @@ +use std::collections::hash_map::Entry; +use std::collections::{BTreeMap, HashMap}; +use std::ffi::OsStr; +use std::fmt::Debug; +use std::path::PathBuf; +use std::sync::OnceLock; + +use bit_set::BitSet; +use itertools::Itertools; +use log::{debug, info, warn}; +use pbf_font_tools::freetype::{Face, Library}; +use pbf_font_tools::protobuf::Message; +use pbf_font_tools::{render_sdf_glyph, Fontstack, Glyphs, PbfFontError}; +use regex::Regex; +use serde::{Deserialize, Serialize}; + +use crate::OptOneMany; + +const MAX_UNICODE_CP: usize = 0xFFFF; +const CP_RANGE_SIZE: usize = 256; +const FONT_SIZE: usize = 24; +#[allow(clippy::cast_possible_wrap)] +const CHAR_HEIGHT: isize = (FONT_SIZE as isize) << 6; +const BUFFER_SIZE: usize = 3; +const RADIUS: usize = 8; +const CUTOFF: f64 = 0.25_f64; + +/// Each range is 256 codepoints long, so the highest range ID is 0xFFFF / 256 = 255. +const MAX_UNICODE_CP_RANGE_ID: usize = MAX_UNICODE_CP / CP_RANGE_SIZE; + +#[derive(thiserror::Error, Debug)] +pub enum FontError { + #[error("Font {0} not found")] + FontNotFound(String), + + #[error("Font range start ({0}) must be <= end ({1})")] + InvalidFontRangeStartEnd(u32, u32), + + #[error("Font range start ({0}) must be multiple of {CP_RANGE_SIZE} (e.g. 
0, 256, 512, ...)")] + InvalidFontRangeStart(u32), + + #[error( + "Font range end ({0}) must be multiple of {CP_RANGE_SIZE} - 1 (e.g. 255, 511, 767, ...)" + )] + InvalidFontRangeEnd(u32), + + #[error("Given font range {0}-{1} is invalid. It must be {CP_RANGE_SIZE} characters long (e.g. 0-255, 256-511, ...)")] + InvalidFontRange(u32, u32), + + #[error("FreeType font error: {0}")] + FreeType(#[from] pbf_font_tools::freetype::Error), + + #[error("IO error accessing {}: {0}", .1.display())] + IoError(std::io::Error, PathBuf), + + #[error("Invalid font file {}", .0.display())] + InvalidFontFilePath(PathBuf), + + #[error("No font files found in {}", .0.display())] + NoFontFilesFound(PathBuf), + + #[error("Font {0} is missing a family name")] + MissingFamilyName(PathBuf), + + #[error("PBF Font error: {0}")] + PbfFontError(#[from] PbfFontError), + + #[error("Error serializing protobuf: {0}")] + ErrorSerializingProtobuf(#[from] pbf_font_tools::protobuf::Error), +} + +type GetGlyphInfo = (BitSet, usize, Vec<(usize, usize)>, usize, usize); + +fn get_available_codepoints(face: &mut Face) -> Option { + let mut codepoints = BitSet::with_capacity(MAX_UNICODE_CP); + let mut spans = Vec::new(); + let mut first: Option = None; + let mut count = 0; + + for cp in 0..=MAX_UNICODE_CP { + if face.get_char_index(cp) != 0 { + codepoints.insert(cp); + count += 1; + if first.is_none() { + first = Some(cp); + } + } else if let Some(start) = first { + spans.push((start, cp - 1)); + first = None; + } + } + + if count == 0 { + None + } else { + let start = spans[0].0; + let end = spans[spans.len() - 1].1; + Some((codepoints, count, spans, start, end)) + } +} + +#[derive(Debug, Clone, Default)] +pub struct FontSources { + fonts: HashMap, + masks: Vec, +} + +pub type FontCatalog = BTreeMap; + +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)] +pub struct CatalogFontEntry { + pub family: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub style: Option, + pub 
glyphs: usize, + pub start: usize, + pub end: usize, +} + +impl FontSources { + pub fn resolve(config: &mut OptOneMany) -> Result { + if config.is_empty() { + return Ok(Self::default()); + } + + let mut fonts = HashMap::new(); + let lib = Library::init()?; + + for path in config.iter() { + recurse_dirs(&lib, path.clone(), &mut fonts, true)?; + } + + let mut masks = Vec::with_capacity(MAX_UNICODE_CP_RANGE_ID + 1); + + let mut bs = BitSet::with_capacity(CP_RANGE_SIZE); + for v in 0..=MAX_UNICODE_CP { + bs.insert(v); + if v % CP_RANGE_SIZE == (CP_RANGE_SIZE - 1) { + masks.push(bs); + bs = BitSet::with_capacity(CP_RANGE_SIZE); + } + } + + Ok(Self { fonts, masks }) + } + + #[must_use] + pub fn get_catalog(&self) -> FontCatalog { + self.fonts + .iter() + .map(|(k, v)| (k.clone(), v.catalog_entry.clone())) + .sorted_by(|(a, _), (b, _)| a.cmp(b)) + .collect() + } + + /// Given a list of IDs in a format "id1,id2,id3", return a combined font. + #[allow(clippy::cast_possible_truncation)] + pub fn get_font_range(&self, ids: &str, start: u32, end: u32) -> Result, FontError> { + if start > end { + return Err(FontError::InvalidFontRangeStartEnd(start, end)); + } + if start % (CP_RANGE_SIZE as u32) != 0 { + return Err(FontError::InvalidFontRangeStart(start)); + } + if end % (CP_RANGE_SIZE as u32) != (CP_RANGE_SIZE as u32 - 1) { + return Err(FontError::InvalidFontRangeEnd(end)); + } + if (end - start) != (CP_RANGE_SIZE as u32 - 1) { + return Err(FontError::InvalidFontRange(start, end)); + } + + let mut needed = self.masks[(start as usize) / CP_RANGE_SIZE].clone(); + let fonts = ids + .split(',') + .filter_map(|id| match self.fonts.get(id) { + None => Some(Err(FontError::FontNotFound(id.to_string()))), + Some(v) => { + let mut ds = needed.clone(); + ds.intersect_with(&v.codepoints); + if ds.is_empty() { + None + } else { + needed.difference_with(&v.codepoints); + Some(Ok((id, v, ds))) + } + } + }) + .collect::, FontError>>()?; + + if fonts.is_empty() { + return Ok(Vec::new()); + } + 
+ let lib = Library::init()?; + let mut stack = Fontstack::new(); + + for (id, font, ds) in fonts { + if stack.has_name() { + let name = stack.mut_name(); + name.push_str(", "); + name.push_str(id); + } else { + stack.set_name(id.to_string()); + } + + let face = lib.new_face(&font.path, font.face_index)?; + + // FreeType conventions: char width or height of zero means "use the same value" + // and setting both resolution values to zero results in the default value + // of 72 dpi. + // + // See https://www.freetype.org/freetype2/docs/reference/ft2-base_interface.html#ft_set_char_size + // and https://www.freetype.org/freetype2/docs/tutorial/step1.html for details. + face.set_char_size(0, CHAR_HEIGHT, 0, 0)?; + + for cp in &ds { + let glyph = render_sdf_glyph(&face, cp as u32, BUFFER_SIZE, RADIUS, CUTOFF)?; + stack.glyphs.push(glyph); + } + } + + stack.set_range(format!("{start}-{end}")); + + let mut glyphs = Glyphs::new(); + glyphs.stacks.push(stack); + let mut result = Vec::new(); + glyphs.write_to_vec(&mut result)?; + Ok(result) + } +} + +#[derive(Clone, Debug)] +pub struct FontSource { + path: PathBuf, + face_index: isize, + codepoints: BitSet, + catalog_entry: CatalogFontEntry, +} + +fn recurse_dirs( + lib: &Library, + path: PathBuf, + fonts: &mut HashMap, + is_top_level: bool, +) -> Result<(), FontError> { + let start_count = fonts.len(); + if path.is_dir() { + for dir_entry in path + .read_dir() + .map_err(|e| FontError::IoError(e, path.clone()))? 
+ .flatten() + { + recurse_dirs(lib, dir_entry.path(), fonts, false)?; + } + if is_top_level && fonts.len() == start_count { + return Err(FontError::NoFontFilesFound(path)); + } + } else { + if path + .extension() + .and_then(OsStr::to_str) + .is_some_and(|e| ["otf", "ttf", "ttc"].contains(&e)) + { + parse_font(lib, fonts, path.clone())?; + } + if is_top_level && fonts.len() == start_count { + return Err(FontError::InvalidFontFilePath(path)); + } + } + + Ok(()) +} + +fn parse_font( + lib: &Library, + fonts: &mut HashMap, + path: PathBuf, +) -> Result<(), FontError> { + static RE_SPACES: OnceLock = OnceLock::new(); + + let mut face = lib.new_face(&path, 0)?; + let num_faces = face.num_faces() as isize; + for face_index in 0..num_faces { + if face_index > 0 { + face = lib.new_face(&path, face_index)?; + } + let Some(family) = face.family_name() else { + return Err(FontError::MissingFamilyName(path)); + }; + let mut name = family.clone(); + let style = face.style_name(); + if let Some(style) = &style { + name.push(' '); + name.push_str(style); + } + // Make sure font name has no slashes or commas, replacing them with spaces and de-duplicating spaces + name = RE_SPACES + .get_or_init(|| Regex::new(r"(\s|/|,)+").unwrap()) + .replace_all(name.as_str(), " ") + .to_string(); + + match fonts.entry(name) { + Entry::Occupied(v) => { + warn!( + "Ignoring duplicate font {} from {} because it was already configured from {}", + v.key(), + path.display(), + v.get().path.display() + ); + } + Entry::Vacant(v) => { + let key = v.key(); + let Some((codepoints, glyphs, ranges, start, end)) = + get_available_codepoints(&mut face) + else { + warn!( + "Ignoring font {key} from {} because it has no available glyphs", + path.display() + ); + continue; + }; + + info!( + "Configured font {key} with {glyphs} glyphs ({start:04X}-{end:04X}) from {}", + path.display() + ); + debug!( + "Available font ranges: {}", + ranges + .iter() + .map(|(s, e)| if s == e { + format!("{s:02X}") + } else { + 
format!("{s:02X}-{e:02X}") + }) + .collect::>() + .join(", "), + ); + + v.insert(FontSource { + path: path.clone(), + face_index, + codepoints, + catalog_entry: CatalogFontEntry { + family, + style, + glyphs, + start, + end, + }, + }); + } + } + } + + Ok(()) +} diff --git a/martin/src/lib.rs b/martin/src/lib.rs index 9c560b75b..1feb9843c 100644 --- a/martin/src/lib.rs +++ b/martin/src/lib.rs @@ -1,3 +1,5 @@ +#![doc = include_str!("../README.md")] +#![forbid(unsafe_code)] #![warn(clippy::pedantic)] // Bounds struct derives PartialEq, but not Eq, // so all containing types must also derive PartialEq without Eq @@ -10,6 +12,7 @@ pub mod args; mod config; pub mod file_config; +pub mod fonts; pub mod mbtiles; pub mod pg; pub mod pmtiles; @@ -17,6 +20,7 @@ mod source; pub mod sprites; pub mod srv; mod utils; +pub use utils::Xyz; #[cfg(test)] #[path = "utils/test_utils.rs"] @@ -26,10 +30,10 @@ mod test_utils; // Must make it accessible as carte::Env from both places when testing. #[cfg(test)] pub use crate::args::Env; -pub use crate::config::{read_config, Config}; -pub use crate::source::{Source, Sources, Xyz}; +pub use crate::config::{read_config, Config, ServerState}; +pub use crate::source::Source; pub use crate::utils::{ - decode_brotli, decode_gzip, BoolOrObject, Error, IdResolver, OneOrMany, Result, + decode_brotli, decode_gzip, Error, IdResolver, OptBoolObj, OptOneMany, Result, }; // Ensure README.md contains valid code diff --git a/martin/src/mbtiles/mod.rs b/martin/src/mbtiles/mod.rs index 79dbb50e3..ea4c84796 100644 --- a/martin/src/mbtiles/mod.rs +++ b/martin/src/mbtiles/mod.rs @@ -5,14 +5,13 @@ use std::sync::Arc; use async_trait::async_trait; use log::trace; -use martin_mbtiles::MbtilesPool; use martin_tile_utils::TileInfo; +use mbtiles::MbtilesPool; use tilejson::TileJSON; use crate::file_config::FileError; use crate::file_config::FileError::{AquireConnError, InvalidMetadata, IoError}; use crate::source::{Tile, UrlQuery}; -use crate::utils::is_valid_zoom; 
use crate::{Error, Source, Xyz}; #[derive(Clone)] @@ -66,8 +65,12 @@ impl MbtSource { #[async_trait] impl Source for MbtSource { - fn get_tilejson(&self) -> TileJSON { - self.tilejson.clone() + fn get_id(&self) -> &str { + &self.id + } + + fn get_tilejson(&self) -> &TileJSON { + &self.tilejson } fn get_tile_info(&self) -> TileInfo { @@ -78,14 +81,6 @@ impl Source for MbtSource { Box::new(self.clone()) } - fn is_valid_zoom(&self, zoom: u8) -> bool { - is_valid_zoom(zoom, self.tilejson.minzoom, self.tilejson.maxzoom) - } - - fn support_url_query(&self) -> bool { - false - } - async fn get_tile(&self, xyz: &Xyz, _url_query: &Option) -> Result { if let Some(tile) = self .mbtiles diff --git a/martin/src/pg/config.rs b/martin/src/pg/config.rs index 8d4207538..5dfa3c1e8 100644 --- a/martin/src/pg/config.rs +++ b/martin/src/pg/config.rs @@ -1,14 +1,19 @@ +use std::ops::Add; +use std::time::Duration; + use futures::future::try_join; +use log::warn; use serde::{Deserialize, Serialize}; use tilejson::TileJSON; +use crate::args::{BoundsCalcType, DEFAULT_BOUNDS_TIMEOUT}; use crate::config::{copy_unrecognized_config, UnrecognizedValues}; use crate::pg::config_function::FuncInfoSources; use crate::pg::config_table::TableInfoSources; use crate::pg::configurator::PgBuilder; use crate::pg::Result; -use crate::source::Sources; -use crate::utils::{sorted_opt_map, BoolOrObject, IdResolver, OneOrMany}; +use crate::source::TileInfoSources; +use crate::utils::{on_slow, sorted_opt_map, IdResolver, OptBoolObj, OptOneMany}; pub trait PgInfo { fn format_id(&self) -> String; @@ -18,18 +23,15 @@ pub trait PgInfo { #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] pub struct PgSslCerts { /// Same as PGSSLCERT - /// https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLCERT - #[cfg(feature = "ssl")] + /// ([docs](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLCERT)) #[serde(skip_serializing_if = "Option::is_none")] pub ssl_cert: 
Option, /// Same as PGSSLKEY - /// https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLKEY - #[cfg(feature = "ssl")] + /// ([docs](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLKEY)) #[serde(skip_serializing_if = "Option::is_none")] pub ssl_key: Option, /// Same as PGSSLROOTCERT - /// https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLROOTCERT - #[cfg(feature = "ssl")] + /// ([docs](https://www.postgresql.org/docs/current/libpq-connect.html#LIBPQ-CONNECT-SSLROOTCERT)) #[serde(skip_serializing_if = "Option::is_none")] pub ssl_root_cert: Option, } @@ -42,13 +44,13 @@ pub struct PgConfig { #[serde(skip_serializing_if = "Option::is_none")] pub default_srid: Option, #[serde(skip_serializing_if = "Option::is_none")] - pub disable_bounds: Option, + pub auto_bounds: Option, #[serde(skip_serializing_if = "Option::is_none")] pub max_feature_count: Option, #[serde(skip_serializing_if = "Option::is_none")] pub pool_size: Option, - #[serde(skip_serializing_if = "Option::is_none")] - pub auto_publish: Option>, + #[serde(default, skip_serializing_if = "OptBoolObj::is_none")] + pub auto_publish: OptBoolObj, #[serde(skip_serializing_if = "Option::is_none")] #[serde(serialize_with = "sorted_opt_map")] pub tables: Option, @@ -59,29 +61,29 @@ pub struct PgConfig { #[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] pub struct PgCfgPublish { - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default, skip_serializing_if = "OptOneMany::is_none")] #[serde(alias = "from_schema")] - pub from_schemas: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub tables: Option>, - #[serde(skip_serializing_if = "Option::is_none")] - pub functions: Option>, + pub from_schemas: OptOneMany, + #[serde(default, skip_serializing_if = "OptBoolObj::is_none")] + pub tables: OptBoolObj, + #[serde(default, skip_serializing_if = "OptBoolObj::is_none")] + pub functions: OptBoolObj, } 
#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] -pub struct PgCfgPublishType { - #[serde(skip_serializing_if = "Option::is_none")] +pub struct PgCfgPublishTables { + #[serde(default, skip_serializing_if = "OptOneMany::is_none")] #[serde(alias = "from_schema")] - pub from_schemas: Option>, + pub from_schemas: OptOneMany, #[serde(skip_serializing_if = "Option::is_none")] #[serde(alias = "id_format")] pub source_id_format: Option, /// A table column to use as the feature ID /// If a table has no column with this name, `id_column` will not be set for that table. /// If a list of strings is given, the first found column will be treated as a feature ID. - #[serde(skip_serializing_if = "Option::is_none")] + #[serde(default, skip_serializing_if = "OptOneMany::is_none")] #[serde(alias = "id_column")] - pub id_columns: Option>, + pub id_columns: OptOneMany, #[serde(skip_serializing_if = "Option::is_none")] pub clip_geom: Option, #[serde(skip_serializing_if = "Option::is_none")] @@ -90,6 +92,16 @@ pub struct PgCfgPublishType { pub extent: Option, } +#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] +pub struct PgCfgPublishFuncs { + #[serde(default, skip_serializing_if = "OptOneMany::is_none")] + #[serde(alias = "from_schema")] + pub from_schemas: OptOneMany, + #[serde(skip_serializing_if = "Option::is_none")] + #[serde(alias = "id_format")] + pub source_id_format: Option, +} + impl PgConfig { /// Apply defaults to the config, and validate if there is a connection string pub fn finalize(&mut self) -> Result { @@ -105,16 +117,28 @@ impl PgConfig { } } if self.tables.is_none() && self.functions.is_none() && self.auto_publish.is_none() { - self.auto_publish = Some(BoolOrObject::Bool(true)); + self.auto_publish = OptBoolObj::Bool(true); } Ok(res) } - pub async fn resolve(&mut self, id_resolver: IdResolver) -> crate::Result { + pub async fn resolve(&mut self, id_resolver: IdResolver) -> crate::Result { let pg = PgBuilder::new(self, 
id_resolver).await?;
+        let inst_tables = on_slow(
+            pg.instantiate_tables(),
+            // warn only if default bounds timeout has already passed
+            DEFAULT_BOUNDS_TIMEOUT.add(Duration::from_secs(1)),
+            || {
+                if pg.auto_bounds() == BoundsCalcType::Skip {
+                    warn!("Discovering tables in PostgreSQL database '{}' is taking too long. Bounds calculation is already disabled. You may need to tune your database.", pg.get_id());
+                } else {
+                    warn!("Discovering tables in PostgreSQL database '{}' is taking too long. Make sure your table geo columns have a GIS index, or use '--auto-bounds skip' CLI/config to skip bbox calculation.", pg.get_id());
+                }
+            },
+        );
         let ((mut tables, tbl_info), (funcs, func_info)) =
-            try_join(pg.instantiate_tables(), pg.instantiate_functions()).await?;
+            try_join(inst_tables, pg.instantiate_functions()).await?;
 
         self.tables = Some(tbl_info);
         self.functions = Some(func_info);
@@ -136,7 +160,7 @@
 mod tests {
     use crate::pg::config_function::FunctionInfo;
     use crate::pg::config_table::TableInfo;
     use crate::test_utils::some;
-    use crate::utils::OneOrMany::{Many, One};
+    use crate::utils::OptOneMany::{Many, One};
 
     #[test]
     fn parse_pg_one() {
@@ -146,11 +170,11 @@
                 connection_string: 'postgresql://postgres@localhost/db'
             "},
             &Config {
-                postgres: Some(One(PgConfig {
+                postgres: One(PgConfig {
                     connection_string: some("postgresql://postgres@localhost/db"),
-                    auto_publish: Some(BoolOrObject::Bool(true)),
+                    auto_publish: OptBoolObj::Bool(true),
                     ..Default::default()
-                })),
+                }),
                 ..Default::default()
             },
         );
@@ -165,18 +189,18 @@
                 - connection_string: 'postgresql://postgres@localhost:5433/db'
             "},
             &Config {
-                postgres: Some(Many(vec![
+                postgres: Many(vec![
                     PgConfig {
                         connection_string: some("postgres://postgres@localhost:5432/db"),
-                        auto_publish: Some(BoolOrObject::Bool(true)),
+                        auto_publish: OptBoolObj::Bool(true),
                         ..Default::default()
                     },
                     PgConfig {
                         connection_string: some("postgresql://postgres@localhost:5433/db"),
-                        auto_publish: 
Some(BoolOrObject::Bool(true)), + auto_publish: OptBoolObj::Bool(true), ..Default::default() }, - ])), + ]), ..Default::default() }, ); @@ -218,7 +242,7 @@ mod tests { bounds: [-180.0, -90.0, 180.0, 90.0] "}, &Config { - postgres: Some(One(PgConfig { + postgres: One(PgConfig { connection_string: some("postgres://postgres@localhost:5432/db"), default_srid: Some(4326), pool_size: Some(20), @@ -255,7 +279,7 @@ mod tests { ), )])), ..Default::default() - })), + }), ..Default::default() }, ); diff --git a/martin/src/pg/configurator.rs b/martin/src/pg/configurator.rs old mode 100755 new mode 100644 index d5fbf77d0..cefbdba40 --- a/martin/src/pg/configurator.rs +++ b/martin/src/pg/configurator.rs @@ -5,6 +5,7 @@ use futures::future::join_all; use itertools::Itertools; use log::{debug, error, info, warn}; +use crate::args::BoundsCalcType; use crate::pg::config::{PgConfig, PgInfo}; use crate::pg::config_function::{FuncInfoSources, FunctionInfo}; use crate::pg::config_table::{TableInfo, TableInfoSources}; @@ -16,20 +17,42 @@ use crate::pg::table_source::{ }; use crate::pg::utils::{find_info, find_kv_ignore_case, normalize_key, InfoMap}; use crate::pg::PgError::InvalidTableExtent; -use crate::pg::Result; -use crate::source::Sources; -use crate::utils::{BoolOrObject, IdResolver, OneOrMany}; +use crate::pg::{PgCfgPublish, PgCfgPublishFuncs, Result}; +use crate::source::TileInfoSources; +use crate::utils::IdResolver; +use crate::utils::OptOneMany::NoVals; +use crate::OptBoolObj::{Bool, NoValue, Object}; pub type SqlFuncInfoMapMap = InfoMap>; pub type SqlTableInfoMapMapMap = InfoMap>>; #[derive(Debug, PartialEq)] -pub struct PgBuilderAuto { +#[cfg_attr(test, derive(serde::Serialize))] +pub struct PgBuilderFuncs { + #[cfg_attr(test, serde(skip_serializing_if = "Option::is_none"))] + schemas: Option>, source_id_format: String, +} + +#[derive(Debug, Default, PartialEq)] +#[cfg_attr(test, derive(serde::Serialize))] +pub struct PgBuilderTables { + #[cfg_attr( + test, + serde( + 
skip_serializing_if = "Option::is_none", + serialize_with = "crate::utils::sorted_opt_set" + ) + )] schemas: Option>, + source_id_format: String, + #[cfg_attr(test, serde(skip_serializing_if = "Option::is_none"))] id_columns: Option>, + #[cfg_attr(test, serde(skip_serializing_if = "Option::is_none"))] clip_geom: Option, + #[cfg_attr(test, serde(skip_serializing_if = "Option::is_none"))] buffer: Option, + #[cfg_attr(test, serde(skip_serializing_if = "Option::is_none"))] extent: Option, } @@ -37,35 +60,65 @@ pub struct PgBuilderAuto { pub struct PgBuilder { pool: PgPool, default_srid: Option, - disable_bounds: bool, + auto_bounds: BoundsCalcType, max_feature_count: Option, - auto_functions: Option, - auto_tables: Option, + auto_functions: Option, + auto_tables: Option, id_resolver: IdResolver, tables: TableInfoSources, functions: FuncInfoSources, } +/// Combine `from_schema` field from the `config.auto_publish` and `config.auto_publish.tables/functions` +macro_rules! get_auto_schemas { + ($config:expr, $typ:ident) => { + if let Object(v) = &$config.auto_publish { + match (&v.from_schemas, &v.$typ) { + (NoVals, NoValue | Bool(_)) => None, + (v, NoValue | Bool(_)) => v.opt_iter().map(|v| v.cloned().collect()), + (NoVals, Object(v)) => v.from_schemas.opt_iter().map(|v| v.cloned().collect()), + (v, Object(v2)) => { + let mut vals: HashSet<_> = v.iter().cloned().collect(); + vals.extend(v2.from_schemas.iter().cloned()); + Some(vals) + } + } + } else { + None + } + }; +} + impl PgBuilder { pub async fn new(config: &PgConfig, id_resolver: IdResolver) -> Result { let pool = PgPool::new(config).await?; + let (auto_tables, auto_functions) = calc_auto(config); + Ok(Self { pool, default_srid: config.default_srid, - disable_bounds: config.disable_bounds.unwrap_or_default(), + auto_bounds: config.auto_bounds.unwrap_or_default(), max_feature_count: config.max_feature_count, id_resolver, tables: config.tables.clone().unwrap_or_default(), functions: 
config.functions.clone().unwrap_or_default(), - auto_functions: new_auto_publish(config, true), - auto_tables: new_auto_publish(config, false), + auto_functions, + auto_tables, }) } + pub fn auto_bounds(&self) -> BoundsCalcType { + self.auto_bounds + } + + pub fn get_id(&self) -> &str { + self.pool.get_id() + } + // FIXME: this function has gotten too long due to the new formatting rules, need to be refactored #[allow(clippy::too_many_lines)] - pub async fn instantiate_tables(&self) -> Result<(Sources, TableInfoSources)> { + pub async fn instantiate_tables(&self) -> Result<(TileInfoSources, TableInfoSources)> { let mut db_tables_info = query_available_tables(&self.pool).await?; // Match configured sources with the discovered ones and add them to the pending list. @@ -108,7 +161,7 @@ impl PgBuilder { id2, merged_inf, self.pool.clone(), - self.disable_bounds, + self.auto_bounds, self.max_feature_count, )); } @@ -154,7 +207,7 @@ impl PgBuilder { id2, db_inf, self.pool.clone(), - self.disable_bounds, + self.auto_bounds, self.max_feature_count, )); } @@ -162,7 +215,7 @@ impl PgBuilder { } } - let mut res = Sources::default(); + let mut res = TileInfoSources::default(); let mut info_map = TableInfoSources::new(); let pending = join_all(pending).await; for src in pending { @@ -182,9 +235,9 @@ impl PgBuilder { Ok((res, info_map)) } - pub async fn instantiate_functions(&self) -> Result<(Sources, FuncInfoSources)> { + pub async fn instantiate_functions(&self) -> Result<(TileInfoSources, FuncInfoSources)> { let mut db_funcs_info = query_available_function(&self.pool).await?; - let mut res = Sources::default(); + let mut res = TileInfoSources::default(); let mut info_map = FuncInfoSources::new(); let mut used = HashSet::<(&str, &str)>::new(); @@ -254,18 +307,20 @@ impl PgBuilder { self.id_resolver.resolve(id, signature) } - fn add_func_src(&self, sources: &mut Sources, id: String, info: &impl PgInfo, sql: PgSqlInfo) { - let source = PgSource::new( - id.clone(), - sql, - 
info.to_tilejson(id.clone()), - self.pool.clone(), - ); - sources.insert(id, Box::new(source)); + fn add_func_src( + &self, + sources: &mut TileInfoSources, + id: String, + info: &impl PgInfo, + sql: PgSqlInfo, + ) { + let tilejson = info.to_tilejson(id.clone()); + let source = PgSource::new(id, sql, tilejson, self.pool.clone()); + sources.push(Box::new(source)); } } -fn update_auto_fields(id: &str, inf: &mut TableInfo, auto_tables: &PgBuilderAuto) { +fn update_auto_fields(id: &str, inf: &mut TableInfo, auto_tables: &PgBuilderTables) { if inf.clip_geom.is_none() { inf.clip_geom = auto_tables.clip_geom; } @@ -323,83 +378,82 @@ fn update_auto_fields(id: &str, inf: &mut TableInfo, auto_tables: &PgBuilderAuto ); } -fn new_auto_publish(config: &PgConfig, is_function: bool) -> Option { - let default_id_fmt = |is_func| (if is_func { "{function}" } else { "{table}" }).to_string(); - let default = |schemas| { - Some(PgBuilderAuto { - source_id_format: default_id_fmt(is_function), - schemas, - id_columns: None, - clip_geom: None, - buffer: None, - extent: None, - }) +fn calc_auto(config: &PgConfig) -> (Option, Option) { + let auto_tables = if use_auto_publish(config, false) { + let schemas = get_auto_schemas!(config, tables); + let bld = if let Object(PgCfgPublish { + tables: Object(v), .. 
+ }) = &config.auto_publish + { + PgBuilderTables { + schemas, + source_id_format: v + .source_id_format + .as_deref() + .unwrap_or("{table}") + .to_string(), + id_columns: v.id_columns.opt_iter().map(|v| v.cloned().collect()), + clip_geom: v.clip_geom, + buffer: v.buffer, + extent: v.extent, + } + } else { + PgBuilderTables { + schemas, + source_id_format: "{table}".to_string(), + ..Default::default() + } + }; + Some(bld) + } else { + None }; - if let Some(bo_a) = &config.auto_publish { - match bo_a { - BoolOrObject::Object(a) => match if is_function { &a.functions } else { &a.tables } { - Some(bo_i) => match bo_i { - BoolOrObject::Object(item) => Some(PgBuilderAuto { - source_id_format: item - .source_id_format - .as_ref() - .cloned() - .unwrap_or_else(|| default_id_fmt(is_function)), - schemas: merge_opt_hs(&a.from_schemas, &item.from_schemas), - id_columns: item.id_columns.as_ref().and_then(|ids| { - if is_function { - error!("Configuration parameter auto_publish.functions.id_columns is not supported"); - None - } else { - Some(ids.iter().cloned().collect()) - } - }), - clip_geom: { - if is_function { - error!("Configuration parameter auto_publish.functions.clip_geom is not supported"); - None - } else { - item.clip_geom - } - }, - buffer: { - if is_function { - error!("Configuration parameter auto_publish.functions.buffer is not supported"); - None - } else { - item.buffer - } - }, - extent: { - if is_function { - error!("Configuration parameter auto_publish.functions.extent is not supported"); - None - } else { - item.extent - } - }, + let auto_functions = if use_auto_publish(config, true) { + Some(PgBuilderFuncs { + schemas: get_auto_schemas!(config, functions), + source_id_format: if let Object(PgCfgPublish { + functions: + Object(PgCfgPublishFuncs { + source_id_format: Some(v), + .. }), - BoolOrObject::Bool(true) => default(merge_opt_hs(&a.from_schemas, &None)), - BoolOrObject::Bool(false) => None, - }, + .. 
+ }) = &config.auto_publish + { + v.clone() + } else { + "{function}".to_string() + }, + }) + } else { + None + }; + + (auto_tables, auto_functions) +} + +fn use_auto_publish(config: &PgConfig, for_functions: bool) -> bool { + match &config.auto_publish { + NoValue => config.tables.is_none() && config.functions.is_none(), + Object(funcs) => { + if for_functions { // If auto_publish.functions is set, and currently asking for .tables which is missing, // .tables becomes the inverse of functions (i.e. an obj or true in tables means false in functions) - None => match if is_function { &a.tables } else { &a.functions } { - Some(bo_i) => match bo_i { - BoolOrObject::Object(_) | BoolOrObject::Bool(true) => None, - BoolOrObject::Bool(false) => default(merge_opt_hs(&a.from_schemas, &None)), - }, - None => default(merge_opt_hs(&a.from_schemas, &None)), - }, - }, - BoolOrObject::Bool(true) => default(None), - BoolOrObject::Bool(false) => None, + match &funcs.functions { + NoValue => matches!(funcs.tables, NoValue | Bool(false)), + Object(_) => true, + Bool(v) => *v, + } + } else { + match &funcs.tables { + NoValue => matches!(funcs.functions, NoValue | Bool(false)), + Object(_) => true, + Bool(v) => *v, + } + } } - } else if config.tables.is_some() || config.functions.is_some() { - None - } else { - default(None) + Bool(v) => *v, } } @@ -432,142 +486,167 @@ fn by_key(a: &(String, T), b: &(String, T)) -> Ordering { a.0.cmp(&b.0) } -/// Merge two optional list of strings into a hashset -fn merge_opt_hs( - a: &Option>, - b: &Option>, -) -> Option> { - if let Some(a) = a { - let mut res: HashSet<_> = a.iter().cloned().collect(); - if let Some(b) = b { - res.extend(b.iter().cloned()); - } - Some(res) - } else { - b.as_ref().map(|b| b.iter().cloned().collect()) - } -} - #[cfg(test)] mod tests { use indoc::indoc; + use insta::assert_yaml_snapshot; use super::*; - #[allow(clippy::unnecessary_wraps)] - fn builder(source_id_format: &str, schemas: Option<&[&str]>) -> Option { - 
Some(PgBuilderAuto { - source_id_format: source_id_format.to_string(), - schemas: schemas.map(|s| s.iter().map(|s| (*s).to_string()).collect()), - id_columns: None, - clip_geom: None, - buffer: None, - extent: None, - }) + #[derive(serde::Serialize)] + struct AutoCfg { + auto_table: Option, + auto_funcs: Option, } - - fn parse_yaml(content: &str) -> PgConfig { - serde_yaml::from_str(content).unwrap() + fn auto(content: &str) -> AutoCfg { + let cfg: PgConfig = serde_yaml::from_str(content).unwrap(); + let (auto_table, auto_funcs) = calc_auto(&cfg); + AutoCfg { + auto_table, + auto_funcs, + } } #[test] + #[allow(clippy::too_many_lines)] fn test_auto_publish_no_auto() { - let config = parse_yaml("{}"); - let res = new_auto_publish(&config, false); - assert_eq!(res, builder("{table}", None)); - let res = new_auto_publish(&config, true); - assert_eq!(res, builder("{function}", None)); - - let config = parse_yaml("tables: {}"); - assert_eq!(new_auto_publish(&config, false), None); - assert_eq!(new_auto_publish(&config, true), None); - - let config = parse_yaml("functions: {}"); - assert_eq!(new_auto_publish(&config, false), None); - assert_eq!(new_auto_publish(&config, true), None); - } - - #[test] - fn test_auto_publish_bool() { - let config = parse_yaml("auto_publish: true"); - let res = new_auto_publish(&config, false); - assert_eq!(res, builder("{table}", None)); - let res = new_auto_publish(&config, true); - assert_eq!(res, builder("{function}", None)); - - let config = parse_yaml("auto_publish: false"); - assert_eq!(new_auto_publish(&config, false), None); - assert_eq!(new_auto_publish(&config, true), None); - } - - #[test] - fn test_auto_publish_obj_bool() { - let config = parse_yaml(indoc! 
{" + let cfg = auto("{}"); + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: + source_id_format: "{table}" + auto_funcs: + source_id_format: "{function}" + "###); + + let cfg = auto("tables: {}"); + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: ~ + auto_funcs: ~ + "###); + + let cfg = auto("functions: {}"); + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: ~ + auto_funcs: ~ + "###); + + let cfg = auto("auto_publish: true"); + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: + source_id_format: "{table}" + auto_funcs: + source_id_format: "{function}" + "###); + + let cfg = auto("auto_publish: false"); + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: ~ + auto_funcs: ~ + "###); + + let cfg = auto(indoc! {" auto_publish: from_schemas: public tables: true"}); - let res = new_auto_publish(&config, false); - assert_eq!(res, builder("{table}", Some(&["public"]))); - assert_eq!(new_auto_publish(&config, true), None); - - let config = parse_yaml(indoc! {" + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: + schemas: + - public + source_id_format: "{table}" + auto_funcs: ~ + "###); + + let cfg = auto(indoc! {" auto_publish: from_schemas: public functions: true"}); - assert_eq!(new_auto_publish(&config, false), None); - let res = new_auto_publish(&config, true); - assert_eq!(res, builder("{function}", Some(&["public"]))); - - let config = parse_yaml(indoc! {" + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: ~ + auto_funcs: + schemas: + - public + source_id_format: "{function}" + "###); + + let cfg = auto(indoc! {" auto_publish: from_schemas: public tables: false"}); - assert_eq!(new_auto_publish(&config, false), None); - let res = new_auto_publish(&config, true); - assert_eq!(res, builder("{function}", Some(&["public"]))); - - let config = parse_yaml(indoc! {" + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: ~ + auto_funcs: + schemas: + - public + source_id_format: "{function}" + "###); + + let cfg = auto(indoc! 
{" auto_publish: from_schemas: public functions: false"}); - let res = new_auto_publish(&config, false); - assert_eq!(res, builder("{table}", Some(&["public"]))); - assert_eq!(new_auto_publish(&config, true), None); - } - - #[test] - fn test_auto_publish_obj_obj() { - let config = parse_yaml(indoc! {" + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: + schemas: + - public + source_id_format: "{table}" + auto_funcs: ~ + "###); + + let cfg = auto(indoc! {" auto_publish: from_schemas: public tables: from_schemas: osm id_format: 'foo_{schema}.{table}_bar'"}); - let res = new_auto_publish(&config, false); - assert_eq!( - res, - builder("foo_{schema}.{table}_bar", Some(&["public", "osm"])) - ); - assert_eq!(new_auto_publish(&config, true), None); - - let config = parse_yaml(indoc! {" + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: + schemas: + - osm + - public + source_id_format: "foo_{schema}.{table}_bar" + auto_funcs: ~ + "###); + + let cfg = auto(indoc! {" auto_publish: from_schemas: public tables: from_schemas: osm source_id_format: '{schema}.{table}'"}); - let res = new_auto_publish(&config, false); - assert_eq!(res, builder("{schema}.{table}", Some(&["public", "osm"]))); - assert_eq!(new_auto_publish(&config, true), None); - - let config = parse_yaml(indoc! {" + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: + schemas: + - osm + - public + source_id_format: "{schema}.{table}" + auto_funcs: ~ + "###); + + let cfg = auto(indoc! 
{" auto_publish: tables: from_schemas: - osm - public"}); - let res = new_auto_publish(&config, false); - assert_eq!(res, builder("{table}", Some(&["public", "osm"]))); - assert_eq!(new_auto_publish(&config, true), None); + assert_yaml_snapshot!(cfg, @r###" + --- + auto_table: + schemas: + - osm + - public + source_id_format: "{table}" + auto_funcs: ~ + "###); } } diff --git a/martin/src/pg/errors.rs b/martin/src/pg/errors.rs index b1e9aa3de..3bf537b5e 100644 --- a/martin/src/pg/errors.rs +++ b/martin/src/pg/errors.rs @@ -1,4 +1,7 @@ -use deadpool_postgres::tokio_postgres::Error; +use std::io; +use std::path::PathBuf; + +use deadpool_postgres::tokio_postgres::Error as TokioPgError; use deadpool_postgres::{BuildError, PoolError}; use semver::Version; @@ -10,28 +13,29 @@ pub type Result = std::result::Result; #[derive(thiserror::Error, Debug)] pub enum PgError { - #[cfg(feature = "ssl")] - #[error("Can't build TLS connection: {0}")] - BuildSslConnectorError(#[from] openssl::error::ErrorStack), + #[error("Cannot load platform root certificates: {0}")] + CannotLoadRoots(#[source] io::Error), + + #[error("Cannot open certificate file {}: {0}", .1.display())] + CannotOpenCert(#[source] io::Error, PathBuf), + + #[error("Cannot parse certificate file {}: {0}", .1.display())] + CannotParseCert(#[source] io::Error, PathBuf), - #[cfg(feature = "ssl")] - #[error("Can't set trusted root certificate {}: {0}", .1.display())] - BadTrustedRootCertError(#[source] openssl::error::ErrorStack, std::path::PathBuf), + #[error("Unable to parse PEM RSA key file {}", .0.display())] + InvalidPrivateKey(PathBuf), - #[cfg(feature = "ssl")] - #[error("Can't set client certificate {}: {0}", .1.display())] - BadClientCertError(#[source] openssl::error::ErrorStack, std::path::PathBuf), + #[error("Unable to use client certificate pair {} / {}: {0}", .1.display(), .2.display())] + CannotUseClientKey(#[source] rustls::Error, PathBuf, PathBuf), - #[cfg(feature = "ssl")] - #[error("Can't set client 
certificate key {}: {0}", .1.display())] - BadClientKeyError(#[source] openssl::error::ErrorStack, std::path::PathBuf), + #[error("Rustls Error: {0:?}")] + RustlsError(#[from] rustls::Error), - #[cfg(feature = "ssl")] #[error("Unknown SSL mode: {0:?}")] UnknownSslMode(deadpool_postgres::tokio_postgres::config::SslMode), #[error("Postgres error while {1}: {0}")] - PostgresError(#[source] Error, &'static str), + PostgresError(#[source] TokioPgError, &'static str), #[error("Unable to build a Postgres connection pool {1}: {0}")] PostgresPoolBuildError(#[source] BuildError, String), @@ -40,7 +44,7 @@ pub enum PgError { PostgresPoolConnError(#[source] PoolError, String), #[error("Unable to parse connection string {1}: {0}")] - BadConnectionString(#[source] Error, String), + BadConnectionString(#[source] TokioPgError, String), #[error("Unable to parse PostGIS version {1}: {0}")] BadPostgisVersion(#[source] semver::Error, String), @@ -52,11 +56,11 @@ pub enum PgError { InvalidTableExtent(String, String), #[error("Error preparing a query for the tile '{1}' ({2}): {3} {0}")] - PrepareQueryError(#[source] Error, String, String, String), + PrepareQueryError(#[source] TokioPgError, String, String, String), #[error(r#"Unable to get tile {2:#} from {1}: {0}"#)] - GetTileError(#[source] Error, String, Xyz), + GetTileError(#[source] TokioPgError, String, Xyz), #[error(r#"Unable to get tile {2:#} with {:?} params from {1}: {0}"#, query_to_json(.3))] - GetTileWithQueryError(#[source] Error, String, Xyz, UrlQuery), + GetTileWithQueryError(#[source] TokioPgError, String, Xyz, UrlQuery), } diff --git a/martin/src/pg/mod.rs b/martin/src/pg/mod.rs index 3437322e7..35204208b 100644 --- a/martin/src/pg/mod.rs +++ b/martin/src/pg/mod.rs @@ -10,11 +10,9 @@ mod table_source; mod tls; mod utils; -pub use config::{PgCfgPublish, PgCfgPublishType, PgConfig, PgSslCerts}; +pub use config::{PgCfgPublish, PgCfgPublishFuncs, PgCfgPublishTables, PgConfig, PgSslCerts}; pub use 
config_function::FunctionInfo; pub use config_table::TableInfo; pub use errors::{PgError, Result}; pub use function_source::query_available_function; pub use pool::{PgPool, POOL_SIZE_DEFAULT}; - -pub use crate::utils::BoolOrObject; diff --git a/martin/src/pg/pg_source.rs b/martin/src/pg/pg_source.rs index 05d7090ea..735d747af 100644 --- a/martin/src/pg/pg_source.rs +++ b/martin/src/pg/pg_source.rs @@ -11,8 +11,8 @@ use tilejson::TileJSON; use crate::pg::pool::PgPool; use crate::pg::utils::query_to_json; use crate::pg::PgError::{GetTileError, GetTileWithQueryError, PrepareQueryError}; -use crate::source::{Source, Tile, UrlQuery, Xyz}; -use crate::utils::{is_valid_zoom, Result}; +use crate::source::{Source, Tile, UrlQuery}; +use crate::{Result, Xyz}; #[derive(Clone, Debug)] pub struct PgSource { @@ -36,8 +36,12 @@ impl PgSource { #[async_trait] impl Source for PgSource { - fn get_tilejson(&self) -> TileJSON { - self.tilejson.clone() + fn get_id(&self) -> &str { + &self.id + } + + fn get_tilejson(&self) -> &TileJSON { + &self.tilejson } fn get_tile_info(&self) -> TileInfo { @@ -48,10 +52,6 @@ impl Source for PgSource { Box::new(self.clone()) } - fn is_valid_zoom(&self, zoom: u8) -> bool { - is_valid_zoom(zoom, self.tilejson.minzoom, self.tilejson.maxzoom) - } - fn support_url_query(&self) -> bool { self.info.use_url_query } diff --git a/martin/src/pg/pool.rs b/martin/src/pg/pool.rs index 4ebceb572..ba3cb0b38 100755 --- a/martin/src/pg/pool.rs +++ b/martin/src/pg/pool.rs @@ -1,9 +1,10 @@ use deadpool_postgres::{Manager, ManagerConfig, Object, Pool, RecyclingMethod}; use log::{info, warn}; +use postgres::config::SslMode; use semver::Version; use crate::pg::config::PgConfig; -use crate::pg::tls::{make_connector, parse_conn_str}; +use crate::pg::tls::{make_connector, parse_conn_str, SslModeOverride}; use crate::pg::PgError::{ BadPostgisVersion, PostgisTooOld, PostgresError, PostgresPoolBuildError, PostgresPoolConnError, }; @@ -27,21 +28,8 @@ pub struct PgPool { impl 
PgPool { pub async fn new(config: &PgConfig) -> Result { - let conn_str = config.connection_string.as_ref().unwrap().as_str(); - info!("Connecting to {conn_str}"); - let (pg_cfg, ssl_mode) = parse_conn_str(conn_str)?; - - let id = pg_cfg.get_dbname().map_or_else( - || format!("{:?}", pg_cfg.get_hosts()[0]), - ToString::to_string, - ); + let (id, mgr) = Self::parse_config(config)?; - let connector = make_connector(&config.ssl_certificates, ssl_mode)?; - - let mgr_config = ManagerConfig { - recycling_method: RecyclingMethod::Fast, - }; - let mgr = Manager::from_config(pg_cfg, connector, mgr_config); let pool = Pool::builder(mgr) .max_size(config.pool_size.unwrap_or(POOL_SIZE_DEFAULT)) .build() @@ -76,6 +64,42 @@ SELECT Ok(Self { id, pool, margin }) } + fn parse_config(config: &PgConfig) -> Result<(String, Manager)> { + let conn_str = config.connection_string.as_ref().unwrap().as_str(); + let (pg_cfg, ssl_mode) = parse_conn_str(conn_str)?; + + let id = pg_cfg.get_dbname().map_or_else( + || format!("{:?}", pg_cfg.get_hosts()[0]), + ToString::to_string, + ); + + let mgr_config = ManagerConfig { + recycling_method: RecyclingMethod::Fast, + }; + + let mgr = if pg_cfg.get_ssl_mode() == SslMode::Disable { + info!("Connecting without SSL support: {pg_cfg:?}"); + let connector = deadpool_postgres::tokio_postgres::NoTls {}; + Manager::from_config(pg_cfg, connector, mgr_config) + } else { + match ssl_mode { + SslModeOverride::Unmodified(_) => { + info!("Connecting with SSL support: {pg_cfg:?}"); + } + SslModeOverride::VerifyCa => { + info!("Using sslmode=verify-ca to connect: {pg_cfg:?}"); + } + SslModeOverride::VerifyFull => { + info!("Using sslmode=verify-full to connect: {pg_cfg:?}"); + } + }; + let connector = make_connector(&config.ssl_certificates, ssl_mode)?; + Manager::from_config(pg_cfg, connector, mgr_config) + }; + + Ok((id, mgr)) + } + pub async fn get(&self) -> Result { get_conn(&self.pool, self.id.as_str()).await } diff --git 
a/martin/src/pg/scripts/query_available_function.sql b/martin/src/pg/scripts/query_available_function.sql index 00d65bf5d..baedff978 100755 --- a/martin/src/pg/scripts/query_available_function.sql +++ b/martin/src/pg/scripts/query_available_function.sql @@ -23,6 +23,7 @@ WITH jsonb_agg(data_type::text ORDER BY ordinal_position) as input_types FROM information_schema.parameters WHERE parameter_mode = 'IN' + AND specific_schema NOT IN ('pg_catalog', 'information_schema') GROUP BY specific_name), -- outputs AS ( @@ -32,6 +33,7 @@ WITH jsonb_agg(parameter_name::text ORDER BY ordinal_position) as out_names FROM information_schema.parameters WHERE parameter_mode = 'OUT' + AND specific_schema NOT IN ('pg_catalog', 'information_schema') GROUP BY specific_name), -- comments AS ( diff --git a/martin/src/pg/table_source.rs b/martin/src/pg/table_source.rs index 9fe61a913..5b95b58f7 100644 --- a/martin/src/pg/table_source.rs +++ b/martin/src/pg/table_source.rs @@ -1,11 +1,14 @@ use std::collections::HashMap; +use futures::pin_mut; use log::{debug, info, warn}; use postgis::ewkb; use postgres_protocol::escape::{escape_identifier, escape_literal}; use serde_json::Value; use tilejson::Bounds; +use tokio::time::timeout; +use crate::args::{BoundsCalcType, DEFAULT_BOUNDS_TIMEOUT}; use crate::pg::config::PgInfo; use crate::pg::config_table::TableInfo; use crate::pg::configurator::SqlTableInfoMapMapMap; @@ -96,7 +99,7 @@ pub async fn table_to_query( id: String, mut info: TableInfo, pool: PgPool, - disable_bounds: bool, + bounds_type: BoundsCalcType, max_feature_count: Option, ) -> Result<(String, PgSqlInfo, TableInfo)> { let schema = escape_identifier(&info.schema); @@ -104,8 +107,26 @@ pub async fn table_to_query( let geometry_column = escape_identifier(&info.geometry_column); let srid = info.srid; - if info.bounds.is_none() && !disable_bounds { - info.bounds = calc_bounds(&pool, &schema, &table, &geometry_column, srid).await?; + if info.bounds.is_none() { + match bounds_type { + 
BoundsCalcType::Skip => {} + BoundsCalcType::Quick | BoundsCalcType::Calc => { + let bounds = calc_bounds(&pool, &schema, &table, &geometry_column, srid); + if bounds_type == BoundsCalcType::Calc { + info.bounds = bounds.await?; + } else { + pin_mut!(bounds); + if let Ok(bounds) = timeout(DEFAULT_BOUNDS_TIMEOUT, &mut bounds).await { + info.bounds = bounds?; + } else { + warn!( + "Timeout computing {} bounds for {id}, aborting query. Use --auto-bounds=calc to wait until complete, or check the table for missing indices.", + info.format_id(), + ); + } + } + } + } } let properties = if let Some(props) = &info.properties { @@ -261,8 +282,6 @@ pub fn calc_srid( Some(default_srid) } (0, 0, None) => { - // TODO: cleanup - // println!("{:#?}", std::backtrace::Backtrace::force_capture()); let info = "To use this table source, set default or specify this table SRID in the config file, or set the default SRID with --default-srid=..."; warn!("Table {table_id} has SRID=0, skipping. {info}"); None diff --git a/martin/src/pg/tls.rs b/martin/src/pg/tls.rs index 96e0ac35b..f143b9a64 100644 --- a/martin/src/pg/tls.rs +++ b/martin/src/pg/tls.rs @@ -1,20 +1,21 @@ +use std::fs::File; +use std::io::BufReader; +use std::path::PathBuf; use std::str::FromStr; use deadpool_postgres::tokio_postgres::config::SslMode; use deadpool_postgres::tokio_postgres::Config; -#[cfg(feature = "ssl")] use log::{info, warn}; -#[cfg(feature = "ssl")] -use openssl::ssl::SslFiletype; -#[cfg(feature = "ssl")] -use openssl::ssl::{SslConnector, SslMethod, SslVerifyMode}; use regex::Regex; - -use crate::pg::PgError::BadConnectionString; -#[cfg(feature = "ssl")] -use crate::pg::PgError::{BadClientCertError, BadClientKeyError, UnknownSslMode}; -#[cfg(feature = "ssl")] -use crate::pg::PgError::{BadTrustedRootCertError, BuildSslConnectorError}; +use rustls::{Certificate, PrivateKey}; +use rustls_native_certs::load_native_certs; +use rustls_pemfile::Item::RSAKey; +use tokio_postgres_rustls::MakeRustlsConnect; + +use 
crate::pg::PgError::{ + BadConnectionString, CannotLoadRoots, CannotOpenCert, CannotParseCert, CannotUseClientKey, + InvalidPrivateKey, UnknownSslMode, +}; use crate::pg::{PgSslCerts, Result}; /// A temporary workaround for @@ -50,24 +51,44 @@ pub fn parse_conn_str(conn_str: &str) -> Result<(Config, SslModeOverride)> { Ok((pg_cfg, mode)) } -#[cfg(not(feature = "ssl"))] -#[allow(clippy::unnecessary_wraps)] -pub fn make_connector( - _certs: &PgSslCerts, - _ssl_mode: SslModeOverride, -) -> Result { - Ok(deadpool_postgres::tokio_postgres::NoTls) +struct NoCertificateVerification {} + +impl rustls::client::ServerCertVerifier for NoCertificateVerification { + fn verify_server_cert( + &self, + _end_entity: &Certificate, + _intermediates: &[Certificate], + _server_name: &rustls::ServerName, + _scts: &mut dyn Iterator, + _ocsp: &[u8], + _now: std::time::SystemTime, + ) -> std::result::Result { + Ok(rustls::client::ServerCertVerified::assertion()) + } +} + +fn read_certs(file: &PathBuf) -> Result> { + Ok(rustls_pemfile::certs(&mut cert_reader(file)?) + .map_err(|e| CannotParseCert(e, file.clone()))? + .into_iter() + .map(Certificate) + .collect()) +} + +fn cert_reader(file: &PathBuf) -> Result> { + Ok(BufReader::new( + File::open(file).map_err(|e| CannotOpenCert(e, file.clone()))?, + )) } -#[cfg(feature = "ssl")] pub fn make_connector( - certs: &PgSslCerts, + pg_certs: &PgSslCerts, ssl_mode: SslModeOverride, -) -> Result { - let (verify_ca, verify_hostname) = match ssl_mode { +) -> Result { + let (verify_ca, _verify_hostname) = match ssl_mode { SslModeOverride::Unmodified(mode) => match mode { SslMode::Disable | SslMode::Prefer => (false, false), - SslMode::Require => match certs.ssl_root_cert { + SslMode::Require => match pg_certs.ssl_root_cert { // If a root CA file exists, the behavior of sslmode=require will be the same as // that of verify-ca, meaning the server certificate is validated against the CA. 
// For more details, check out the note about backwards compatibility in @@ -83,39 +104,57 @@ pub fn make_connector( SslModeOverride::VerifyFull => (true, true), }; - let tls = SslMethod::tls_client(); - let mut builder = SslConnector::builder(tls).map_err(BuildSslConnectorError)?; + let mut roots = rustls::RootCertStore::empty(); - if let (Some(cert), Some(key)) = (&certs.ssl_cert, &certs.ssl_key) { - builder - .set_certificate_file(cert, SslFiletype::PEM) - .map_err(|e| BadClientCertError(e, cert.clone()))?; - builder - .set_private_key_file(key, SslFiletype::PEM) - .map_err(|e| BadClientKeyError(e, key.clone()))?; - } else if certs.ssl_key.is_some() || certs.ssl_key.is_some() { - warn!("SSL client certificate and key files must be set to use client certificate with Postgres. Only one of them was set."); + if let Some(file) = &pg_certs.ssl_root_cert { + for cert in read_certs(file)? { + roots.add(&cert)?; + } + info!("Using {} as a root certificate", file.display()); } - if let Some(file) = &certs.ssl_root_cert { - builder - .set_ca_file(file) - .map_err(|e| BadTrustedRootCertError(e, file.clone()))?; - info!("Using {} as a root certificate", file.display()); + if verify_ca || pg_certs.ssl_root_cert.is_some() || pg_certs.ssl_cert.is_some() { + let certs = load_native_certs().map_err(CannotLoadRoots)?; + for cert in certs { + roots.add(&Certificate(cert.0))?; + } } + let builder = rustls::ClientConfig::builder() + .with_safe_defaults() + .with_root_certificates(roots); + + let mut builder = if let (Some(cert), Some(key)) = (&pg_certs.ssl_cert, &pg_certs.ssl_key) { + match rustls_pemfile::read_one(&mut cert_reader(key)?) + .map_err(|e| CannotParseCert(e, key.clone()))? 
+ { + Some(RSAKey(rsa_key)) => builder + .with_client_auth_cert(read_certs(cert)?, PrivateKey(rsa_key)) + .map_err(|e| CannotUseClientKey(e, cert.clone(), key.clone()))?, + _ => Err(InvalidPrivateKey(key.clone()))?, + } + } else { + if pg_certs.ssl_key.is_some() || pg_certs.ssl_key.is_some() { + warn!("SSL client certificate and key files must be set to use client certificate with Postgres. Only one of them was set."); + } + builder.with_no_client_auth() + }; + if !verify_ca { - builder.set_verify(SslVerifyMode::NONE); + builder + .dangerous() + .set_certificate_verifier(std::sync::Arc::new(NoCertificateVerification {})); } - let mut connector = postgres_openssl::MakeTlsConnector::new(builder.build()); + let connector = MakeRustlsConnect::new(builder); - if !verify_hostname { - connector.set_callback(|cfg, _domain| { - cfg.set_verify_hostname(false); - Ok(()) - }); - } + // TODO: ??? + // if !verify_hostname { + // connector.set_callback(|cfg, _domain| { + // cfg.set_verify_hostname(false); + // Ok(()) + // }); + // } Ok(connector) } diff --git a/martin/src/pmtiles/mod.rs b/martin/src/pmtiles/mod.rs index 98e3a86b6..0b678c928 100644 --- a/martin/src/pmtiles/mod.rs +++ b/martin/src/pmtiles/mod.rs @@ -13,9 +13,8 @@ use tilejson::TileJSON; use crate::file_config::FileError; use crate::file_config::FileError::{InvalidMetadata, IoError}; -use crate::source::{Source, Tile, UrlQuery, Xyz}; -use crate::utils::is_valid_zoom; -use crate::Error; +use crate::source::{Source, Tile, UrlQuery}; +use crate::{Error, Xyz}; #[derive(Clone)] pub struct PmtSource { @@ -114,8 +113,12 @@ impl PmtSource { #[async_trait] impl Source for PmtSource { - fn get_tilejson(&self) -> TileJSON { - self.tilejson.clone() + fn get_id(&self) -> &str { + &self.id + } + + fn get_tilejson(&self) -> &TileJSON { + &self.tilejson } fn get_tile_info(&self) -> TileInfo { @@ -126,14 +129,6 @@ impl Source for PmtSource { Box::new(self.clone()) } - fn is_valid_zoom(&self, zoom: u8) -> bool { - is_valid_zoom(zoom, 
self.tilejson.minzoom, self.tilejson.maxzoom) - } - - fn support_url_query(&self) -> bool { - false - } - async fn get_tile(&self, xyz: &Xyz, _url_query: &Option) -> Result { // TODO: optimize to return Bytes if let Some(t) = self diff --git a/martin/src/source.rs b/martin/src/source.rs index 0292166fb..2c0e246e1 100644 --- a/martin/src/source.rs +++ b/martin/src/source.rs @@ -1,91 +1,48 @@ use std::collections::{BTreeMap, HashMap}; -use std::fmt::{Debug, Display, Formatter}; +use std::fmt::Debug; use actix_web::error::ErrorNotFound; use async_trait::async_trait; -use itertools::Itertools; use log::debug; use martin_tile_utils::TileInfo; use serde::{Deserialize, Serialize}; use tilejson::TileJSON; -use crate::utils::Result; - -#[derive(Debug, Copy, Clone)] -pub struct Xyz { - pub z: u8, - pub x: u32, - pub y: u32, -} - -impl Display for Xyz { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { - if f.alternate() { - write!(f, "{}/{}/{}", self.z, self.x, self.y) - } else { - write!(f, "{},{},{}", self.z, self.x, self.y) - } - } -} +use crate::{Result, Xyz}; pub type Tile = Vec; pub type UrlQuery = HashMap; -#[derive(Default, Clone)] -pub struct Sources { - tiles: HashMap>, - catalog: SourceCatalog, -} +pub type TileInfoSource = Box; -impl Sources { - #[must_use] - pub fn sort(self) -> Self { - Self { - tiles: self.tiles, - catalog: SourceCatalog { - tiles: self - .catalog - .tiles - .into_iter() - .sorted_by(|a, b| a.0.cmp(&b.0)) - .collect(), - }, - } - } -} +pub type TileInfoSources = Vec; -impl Sources { - pub fn insert(&mut self, id: String, source: Box) { - let tilejson = source.get_tilejson(); - let info = source.get_tile_info(); - self.catalog.tiles.insert( - id.clone(), - SourceEntry { - content_type: info.format.content_type().to_string(), - content_encoding: info.encoding.content_encoding().map(ToString::to_string), - name: tilejson.name.filter(|v| v != &id), - description: tilejson.description, - attribution: tilejson.attribution, - }, - ); - 
self.tiles.insert(id, source); - } +#[derive(Default, Clone)] +pub struct TileSources(HashMap>); +pub type TileCatalog = BTreeMap; - pub fn extend(&mut self, other: Sources) { - for (k, v) in other.catalog.tiles { - self.catalog.tiles.insert(k, v); - } - self.tiles.extend(other.tiles); +impl TileSources { + #[must_use] + pub fn new(sources: Vec) -> Self { + Self( + sources + .into_iter() + .flatten() + .map(|src| (src.get_id().to_string(), src)) + .collect(), + ) } - #[must_use] - pub fn get_catalog(&self) -> &SourceCatalog { - &self.catalog + pub fn get_catalog(&self) -> TileCatalog { + self.0 + .iter() + .map(|(id, src)| (id.to_string(), src.get_catalog_entry())) + .collect() } pub fn get_source(&self, id: &str) -> actix_web::Result<&dyn Source> { Ok(self - .tiles + .0 .get(id) .ok_or_else(|| ErrorNotFound(format!("Source {id} does not exist")))? .as_ref()) @@ -138,17 +95,38 @@ impl Sources { #[async_trait] pub trait Source: Send + Debug { - fn get_tilejson(&self) -> TileJSON; + fn get_id(&self) -> &str; + + fn get_tilejson(&self) -> &TileJSON; fn get_tile_info(&self) -> TileInfo; fn clone_source(&self) -> Box; - fn is_valid_zoom(&self, zoom: u8) -> bool; - - fn support_url_query(&self) -> bool; + fn support_url_query(&self) -> bool { + false + } async fn get_tile(&self, xyz: &Xyz, query: &Option) -> Result; + + fn is_valid_zoom(&self, zoom: u8) -> bool { + let tj = self.get_tilejson(); + tj.minzoom.map_or(true, |minzoom| zoom >= minzoom) + && tj.maxzoom.map_or(true, |maxzoom| zoom <= maxzoom) + } + + fn get_catalog_entry(&self) -> CatalogSourceEntry { + let id = self.get_id(); + let tilejson = self.get_tilejson(); + let info = self.get_tile_info(); + CatalogSourceEntry { + content_type: info.format.content_type().to_string(), + content_encoding: info.encoding.content_encoding().map(ToString::to_string), + name: tilejson.name.as_ref().filter(|v| *v != id).cloned(), + description: tilejson.description.clone(), + attribution: tilejson.attribution.clone(), + } + } } 
impl Clone for Box { @@ -158,12 +136,7 @@ impl Clone for Box { } #[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct SourceCatalog { - tiles: BTreeMap, -} - -#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)] -pub struct SourceEntry { +pub struct CatalogSourceEntry { pub content_type: String, #[serde(skip_serializing_if = "Option::is_none")] pub content_encoding: Option, diff --git a/martin/src/sprites/mod.rs b/martin/src/sprites/mod.rs index 519b280fb..dc205d6c4 100644 --- a/martin/src/sprites/mod.rs +++ b/martin/src/sprites/mod.rs @@ -1,14 +1,14 @@ use std::collections::hash_map::Entry; -use std::collections::HashMap; +use std::collections::{BTreeMap, HashMap}; use std::fmt::Debug; use std::path::PathBuf; use futures::future::try_join_all; use log::{info, warn}; +use serde::{Deserialize, Serialize}; use spreet::fs::get_svg_input_paths; -use spreet::resvg::tiny_skia::Pixmap; use spreet::resvg::usvg::{Error as ResvgError, Options, Tree, TreeParsing}; -use spreet::sprite::{generate_pixmap_from_svg, sprite_name, Spritesheet, SpritesheetBuilder}; +use spreet::sprite::{sprite_name, Sprite, Spritesheet, SpritesheetBuilder}; use tokio::io::AsyncReadExt; use crate::file_config::{FileConfigEnum, FileError}; @@ -43,25 +43,34 @@ pub enum SpriteError { UnableToGenerateSpritesheet, } -pub fn resolve_sprites(config: &mut Option) -> Result { - let Some(cfg) = config else { - return Ok(SpriteSources::default()); - }; +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)] +pub struct CatalogSpriteEntry { + pub images: Vec, +} - let cfg = cfg.extract_file_config(); - let mut results = SpriteSources::default(); - let mut directories = Vec::new(); - let mut configs = HashMap::new(); +pub type SpriteCatalog = BTreeMap; - if let Some(sources) = cfg.sources { - for (id, source) in sources { - configs.insert(id.clone(), source.clone()); - add_source(id, source.abs_path()?, &mut results); - } - }; +#[derive(Debug, 
Clone, Default)] +pub struct SpriteSources(HashMap); + +impl SpriteSources { + pub fn resolve(config: &mut FileConfigEnum) -> Result { + let Some(cfg) = config.extract_file_config() else { + return Ok(Self::default()); + }; + + let mut results = Self::default(); + let mut directories = Vec::new(); + let mut configs = HashMap::new(); - if let Some(paths) = cfg.paths { - for path in paths { + if let Some(sources) = cfg.sources { + for (id, source) in sources { + configs.insert(id.clone(), source.clone()); + results.add_source(id, source.abs_path()?); + } + }; + + for path in cfg.paths { let Some(name) = path.file_name() else { warn!( "Ignoring sprite source with no name from {}", @@ -70,41 +79,46 @@ pub fn resolve_sprites(config: &mut Option) -> Result { - warn!("Ignoring duplicate sprite source {} from {disp_path} because it was already configured for {}", + pub fn get_catalog(&self) -> Result { + // TODO: all sprite generation should be pre-cached + Ok(self + .0 + .iter() + .map(|(id, source)| { + let mut images = get_svg_input_paths(&source.path, true) + .into_iter() + .map(|svg_path| sprite_name(svg_path, &source.path)) + .collect::>(); + images.sort(); + (id.clone(), CatalogSpriteEntry { images }) + }) + .collect()) + } + + fn add_source(&mut self, id: String, path: PathBuf) { + let disp_path = path.display(); + if path.is_file() { + warn!("Ignoring non-directory sprite source {id} from {disp_path}"); + } else { + match self.0.entry(id) { + Entry::Occupied(v) => { + warn!("Ignoring duplicate sprite source {} from {disp_path} because it was already configured for {}", v.key(), v.get().path.display()); + } + Entry::Vacant(v) => { + info!("Configured sprite source {} from {disp_path}", v.key()); + v.insert(SpriteSource { path }); + } } - Entry::Vacant(v) => { - info!("Configured sprite source {} from {disp_path}", v.key()); - v.insert(SpriteSource { path }); - } - } - }; -} - -#[derive(Debug, Clone, Default)] -pub struct SpriteSources(HashMap); - -impl 
SpriteSources { - pub fn get_sprite_source(&self, id: &str) -> Result<&SpriteSource, SpriteError> { - self.0 - .get(id) - .ok_or_else(|| SpriteError::SpriteNotFound(id.to_string())) + }; } /// Given a list of IDs in a format "id1,id2,id3", return a spritesheet with them all. @@ -115,10 +129,16 @@ impl SpriteSources { } else { (ids, 1) }; + let sprite_ids = ids .split(',') - .map(|id| self.get_sprite_source(id)) + .map(|id| { + self.0 + .get(id) + .ok_or_else(|| SpriteError::SpriteNotFound(id.to_string())) + }) .collect::, SpriteError>>()?; + get_spritesheet(sprite_ids.into_iter(), dpi).await } } @@ -132,7 +152,7 @@ async fn parse_sprite( name: String, path: PathBuf, pixel_ratio: u8, -) -> Result<(String, Pixmap), SpriteError> { +) -> Result<(String, Sprite), SpriteError> { let on_err = |e| SpriteError::IoError(e, path.clone()); let mut file = tokio::fs::File::open(&path).await.map_err(on_err)?; @@ -143,10 +163,7 @@ async fn parse_sprite( let tree = Tree::from_data(&buffer, &Options::default()) .map_err(|e| SpriteError::SpriteParsingError(e, path.clone()))?; - let pixmap = generate_pixmap_from_svg(&tree, pixel_ratio) - .ok_or_else(|| SpriteError::UnableToReadSprite(path.clone()))?; - - Ok((name, pixmap)) + Ok((name, Sprite { tree, pixel_ratio })) } pub async fn get_spritesheet( @@ -191,7 +208,7 @@ mod tests { PathBuf::from("../tests/fixtures/sprites/src2"), ]); - let sprites = resolve_sprites(&mut cfg).unwrap().0; + let sprites = SpriteSources::resolve(&mut cfg).unwrap().0; assert_eq!(sprites.len(), 2); test_src(sprites.values(), 1, "all_1").await; diff --git a/martin/src/srv/mod.rs b/martin/src/srv/mod.rs index f88dbe558..6b8e739e6 100644 --- a/martin/src/srv/mod.rs +++ b/martin/src/srv/mod.rs @@ -2,6 +2,6 @@ mod config; mod server; pub use config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT}; -pub use server::{new_server, router, RESERVED_KEYWORDS}; +pub use server::{new_server, router, Catalog, RESERVED_KEYWORDS}; -pub use crate::source::SourceEntry; 
+pub use crate::source::CatalogSourceEntry; diff --git a/martin/src/srv/server.rs b/martin/src/srv/server.rs index db2d2f621..f36cf3345 100755 --- a/martin/src/srv/server.rs +++ b/martin/src/srv/server.rs @@ -17,17 +17,20 @@ use actix_web::{ Result, }; use futures::future::try_join_all; +use itertools::Itertools as _; use log::error; use martin_tile_utils::{Encoding, Format, TileInfo}; -use serde::Deserialize; +use serde::{Deserialize, Serialize}; use tilejson::{tilejson, TileJSON}; -use crate::config::AllSources; -use crate::source::{Source, Sources, UrlQuery, Xyz}; -use crate::sprites::{SpriteError, SpriteSources}; +use crate::config::ServerState; +use crate::fonts::{FontCatalog, FontError, FontSources}; +use crate::source::{Source, TileCatalog, TileSources, UrlQuery}; +use crate::sprites::{SpriteCatalog, SpriteError, SpriteSources}; use crate::srv::config::{SrvConfig, KEEP_ALIVE_DEFAULT, LISTEN_ADDRESSES_DEFAULT}; use crate::utils::{decode_brotli, decode_gzip, encode_brotli, encode_gzip}; use crate::Error::BindingError; +use crate::{Error, Xyz}; /// List of keywords that cannot be used as source IDs. Some of these are reserved for future use. /// Reserved keywords must never end in a "dot number" (e.g. ".1"). 
@@ -43,6 +46,23 @@ static SUPPORTED_ENCODINGS: &[HeaderEnc] = &[ HeaderEnc::identity(), ]; +#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)] +pub struct Catalog { + pub tiles: TileCatalog, + pub sprites: SpriteCatalog, + pub fonts: FontCatalog, +} + +impl Catalog { + pub fn new(state: &ServerState) -> Result { + Ok(Self { + tiles: state.tiles.get_catalog(), + sprites: state.sprites.get_catalog()?, + fonts: state.fonts.get_catalog(), + }) + } +} + #[derive(Deserialize)] struct TileJsonRequest { source_ids: String, @@ -69,6 +89,19 @@ pub fn map_sprite_error(e: SpriteError) -> actix_web::Error { } } +pub fn map_font_error(e: FontError) -> actix_web::Error { + #[allow(clippy::enum_glob_use)] + use FontError::*; + match e { + FontNotFound(_) => ErrorNotFound(e.to_string()), + InvalidFontRangeStartEnd(_, _) + | InvalidFontRangeStart(_) + | InvalidFontRangeEnd(_) + | InvalidFontRange(_, _) => ErrorBadRequest(e.to_string()), + _ => map_internal_error(e), + } +} + /// Root path will eventually have a web front. For now, just a stub. 
#[route("/", method = "GET", method = "HEAD")] #[allow(clippy::unused_async)] @@ -95,8 +128,8 @@ async fn get_health() -> impl Responder { wrap = "middleware::Compress::default()" )] #[allow(clippy::unused_async)] -async fn get_catalog(sources: Data) -> impl Responder { - HttpResponse::Ok().json(sources.get_catalog()) +async fn get_catalog(catalog: Data) -> impl Responder { + HttpResponse::Ok().json(catalog) } #[route("/sprite/{source_ids}.png", method = "GET", method = "HEAD")] @@ -130,6 +163,28 @@ async fn get_sprite_json( Ok(HttpResponse::Ok().json(sheet.get_index())) } +#[derive(Deserialize, Debug)] +struct FontRequest { + fontstack: String, + start: u32, + end: u32, +} + +#[route( + "/font/{fontstack}/{start}-{end}", + method = "GET", + wrap = "middleware::Compress::default()" +)] +#[allow(clippy::unused_async)] +async fn get_font(path: Path, fonts: Data) -> Result { + let data = fonts + .get_font_range(&path.fontstack, path.start, path.end) + .map_err(map_font_error)?; + Ok(HttpResponse::Ok() + .content_type("application/x-protobuf") + .body(data)) +} + #[route( "/{source_ids}", method = "GET", @@ -140,22 +195,23 @@ async fn get_sprite_json( async fn git_source_info( req: HttpRequest, path: Path, - sources: Data, + sources: Data, ) -> Result { let sources = sources.get_sources(&path.source_ids, None)?.0; - - let tiles_path = req - .headers() - .get("x-rewrite-url") - .and_then(parse_x_rewrite_url) - .unwrap_or_else(|| req.path().to_owned()); - let info = req.connection_info(); + let tiles_path = get_request_path(&req); let tiles_url = get_tiles_url(info.scheme(), info.host(), req.query_string(), &tiles_path)?; Ok(HttpResponse::Ok().json(merge_tilejson(sources, tiles_url))) } +fn get_request_path(req: &HttpRequest) -> String { + req.headers() + .get("x-rewrite-url") + .and_then(parse_x_rewrite_url) + .unwrap_or_else(|| req.path().to_owned()) +} + fn get_tiles_url(scheme: &str, host: &str, query_string: &str, tiles_path: &str) -> Result { let path_and_query = 
if query_string.is_empty() { format!("{tiles_path}/{{z}}/{{x}}/{{y}}") @@ -174,7 +230,7 @@ fn get_tiles_url(scheme: &str, host: &str, query_string: &str, tiles_path: &str) fn merge_tilejson(sources: Vec<&dyn Source>, tiles_url: String) -> TileJSON { if sources.len() == 1 { - let mut tj = sources[0].get_tilejson(); + let mut tj = sources[0].get_tilejson().clone(); tj.tiles = vec![tiles_url]; return tj; } @@ -189,15 +245,15 @@ fn merge_tilejson(sources: Vec<&dyn Source>, tiles_url: String) -> TileJSON { for src in sources { let tj = src.get_tilejson(); - if let Some(vector_layers) = tj.vector_layers { + if let Some(vector_layers) = &tj.vector_layers { if let Some(ref mut a) = result.vector_layers { - a.extend(vector_layers); + a.extend(vector_layers.iter().cloned()); } else { - result.vector_layers = Some(vector_layers); + result.vector_layers = Some(vector_layers.clone()); } } - if let Some(v) = tj.attribution { + if let Some(v) = &tj.attribution { if !attributions.contains(&v) { attributions.push(v); } @@ -216,7 +272,7 @@ fn merge_tilejson(sources: Vec<&dyn Source>, tiles_url: String) -> TileJSON { result.center = tj.center; } - if let Some(v) = tj.description { + if let Some(v) = &tj.description { if !descriptions.contains(&v) { descriptions.push(v); } @@ -242,7 +298,7 @@ fn merge_tilejson(sources: Vec<&dyn Source>, tiles_url: String) -> TileJSON { } } - if let Some(name) = tj.name { + if let Some(name) = &tj.name { if !names.contains(&name) { names.push(name); } @@ -250,15 +306,15 @@ fn merge_tilejson(sources: Vec<&dyn Source>, tiles_url: String) -> TileJSON { } if !attributions.is_empty() { - result.attribution = Some(attributions.join("\n")); + result.attribution = Some(attributions.into_iter().join("\n")); } if !descriptions.is_empty() { - result.description = Some(descriptions.join("\n")); + result.description = Some(descriptions.into_iter().join("\n")); } if !names.is_empty() { - result.name = Some(names.join(",")); + result.name = 
Some(names.into_iter().join(",")); } result @@ -268,7 +324,7 @@ fn merge_tilejson(sources: Vec<&dyn Source>, tiles_url: String) -> TileJSON { async fn get_tile( req: HttpRequest, path: Path, - sources: Data, + sources: Data, ) -> Result { let xyz = Xyz { z: path.z, @@ -306,7 +362,7 @@ async fn get_tile( let id = &path.source_ids; let zoom = xyz.z; let src = sources.get_source(id)?; - if !Sources::check_zoom(src, id, zoom) { + if !TileSources::check_zoom(src, id, zoom) { return Err(ErrorNotFound(format!( "Zoom {zoom} is not valid for source {id}", ))); @@ -409,11 +465,13 @@ pub fn router(cfg: &mut web::ServiceConfig) { .service(git_source_info) .service(get_tile) .service(get_sprite_json) - .service(get_sprite_png); + .service(get_sprite_png) + .service(get_font); } /// Create a new initialized Actix `App` instance together with the listening address. -pub fn new_server(config: SrvConfig, all_sources: AllSources) -> crate::Result<(Server, String)> { +pub fn new_server(config: SrvConfig, state: ServerState) -> crate::Result<(Server, String)> { + let catalog = Catalog::new(&state)?; let keep_alive = Duration::from_secs(config.keep_alive.unwrap_or(KEEP_ALIVE_DEFAULT)); let worker_processes = config.worker_processes.unwrap_or_else(num_cpus::get); let listen_addresses = config @@ -427,8 +485,10 @@ pub fn new_server(config: SrvConfig, all_sources: AllSources) -> crate::Result<( .allowed_headers(vec![AUTHORIZATION, ACCEPT]); App::new() - .app_data(Data::new(all_sources.sources.clone())) - .app_data(Data::new(all_sources.sprites.clone())) + .app_data(Data::new(state.tiles.clone())) + .app_data(Data::new(state.sprites.clone())) + .app_data(Data::new(state.fonts.clone())) + .app_data(Data::new(catalog.clone())) .wrap(cors_middleware) .wrap(middleware::NormalizePath::new(TrailingSlash::MergeOnly)) .wrap(middleware::Logger::default()) @@ -470,23 +530,19 @@ mod tests { #[async_trait] impl Source for TestSource { - fn get_tilejson(&self) -> TileJSON { - self.tj.clone() + fn 
get_id(&self) -> &str { + "id" } - fn get_tile_info(&self) -> TileInfo { - unimplemented!() + fn get_tilejson(&self) -> &TileJSON { + &self.tj } - fn clone_source(&self) -> Box { - unimplemented!() - } - - fn is_valid_zoom(&self, _zoom: u8) -> bool { + fn get_tile_info(&self) -> TileInfo { unimplemented!() } - fn support_url_query(&self) -> bool { + fn clone_source(&self) -> Box { unimplemented!() } diff --git a/martin/src/utils/cfg_containers.rs b/martin/src/utils/cfg_containers.rs new file mode 100644 index 000000000..6f7c9c2e4 --- /dev/null +++ b/martin/src/utils/cfg_containers.rs @@ -0,0 +1,143 @@ +use std::vec::IntoIter; + +use serde::{Deserialize, Serialize}; + +/// A serde helper to store a boolean as an object. +#[derive(Clone, Debug, Default, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum OptBoolObj { + #[default] + #[serde(skip)] + NoValue, + Bool(bool), + Object(T), +} + +impl OptBoolObj { + pub fn is_none(&self) -> bool { + matches!(self, Self::NoValue) + } +} + +#[derive(Debug, Default, Clone, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum OptOneMany { + #[default] + NoVals, + One(T), + Many(Vec), +} + +impl IntoIterator for OptOneMany { + type Item = T; + type IntoIter = IntoIter; + + fn into_iter(self) -> Self::IntoIter { + match self { + Self::NoVals => Vec::new().into_iter(), + Self::One(v) => vec![v].into_iter(), + Self::Many(v) => v.into_iter(), + } + } +} + +impl OptOneMany { + pub fn new>(iter: I) -> Self { + let mut iter = iter.into_iter(); + match (iter.next(), iter.next()) { + (Some(first), Some(second)) => { + let mut vec = Vec::with_capacity(iter.size_hint().0 + 2); + vec.push(first); + vec.push(second); + vec.extend(iter); + Self::Many(vec) + } + (Some(first), None) => Self::One(first), + (None, _) => Self::NoVals, + } + } + + pub fn is_none(&self) -> bool { + matches!(self, Self::NoVals) + } + + pub fn is_empty(&self) -> bool { + match self { + Self::NoVals => true, + Self::One(_) => false, + 
Self::Many(v) => v.is_empty(), + } + } + + pub fn iter(&self) -> impl Iterator { + match self { + Self::NoVals => [].iter(), + Self::One(v) => std::slice::from_ref(v).iter(), + Self::Many(v) => v.iter(), + } + } + + pub fn opt_iter(&self) -> Option> { + match self { + Self::NoVals => None, + Self::One(v) => Some(std::slice::from_ref(v).iter()), + Self::Many(v) => Some(v.iter()), + } + } + + pub fn iter_mut(&mut self) -> impl Iterator { + match self { + Self::NoVals => [].iter_mut(), + Self::One(v) => std::slice::from_mut(v).iter_mut(), + Self::Many(v) => v.iter_mut(), + } + } + + pub fn as_slice(&self) -> &[T] { + match self { + Self::NoVals => &[], + Self::One(item) => std::slice::from_ref(item), + Self::Many(v) => v.as_slice(), + } + } +} + +#[cfg(test)] +mod tests { + use super::*; + use crate::OptOneMany::{Many, NoVals, One}; + + #[test] + fn test_one_or_many() { + let mut noval: OptOneMany = NoVals; + let mut one = One(1); + let mut many = Many(vec![1, 2, 3]); + + assert_eq!(OptOneMany::new(vec![1, 2, 3]), Many(vec![1, 2, 3])); + assert_eq!(OptOneMany::new(vec![1]), One(1)); + assert_eq!(OptOneMany::new(Vec::::new()), NoVals); + + assert_eq!(noval.iter_mut().collect::>(), Vec::<&i32>::new()); + assert_eq!(one.iter_mut().collect::>(), vec![&1]); + assert_eq!(many.iter_mut().collect::>(), vec![&1, &2, &3]); + + assert_eq!(noval.iter().collect::>(), Vec::<&i32>::new()); + assert_eq!(one.iter().collect::>(), vec![&1]); + assert_eq!(many.iter().collect::>(), vec![&1, &2, &3]); + + assert_eq!(noval.opt_iter().map(Iterator::collect::>), None); + assert_eq!(one.opt_iter().map(Iterator::collect), Some(vec![&1])); + assert_eq!( + many.opt_iter().map(Iterator::collect), + Some(vec![&1, &2, &3]) + ); + + assert_eq!(noval.as_slice(), Vec::::new().as_slice()); + assert_eq!(one.as_slice(), &[1]); + assert_eq!(many.as_slice(), &[1, 2, 3]); + + assert_eq!(noval.into_iter().collect::>(), Vec::::new()); + assert_eq!(one.into_iter().collect::>(), vec![1]); + 
assert_eq!(many.into_iter().collect::>(), vec![1, 2, 3]); + } +} diff --git a/martin/src/utils/error.rs b/martin/src/utils/error.rs index 44829c34e..bc28e4efc 100644 --- a/martin/src/utils/error.rs +++ b/martin/src/utils/error.rs @@ -1,16 +1,38 @@ +use std::fmt::Write; use std::io; use std::path::PathBuf; use crate::file_config::FileError; +use crate::fonts::FontError; use crate::pg::PgError; use crate::sprites::SpriteError; pub type Result = std::result::Result; +fn elide_vec(vec: &[String], max_items: usize, max_len: usize) -> String { + let mut s = String::new(); + for (i, v) in vec.iter().enumerate() { + if i > max_items { + let _ = write!(s, " and {} more", vec.len() - i); + break; + } + if i > 0 { + s.push(' '); + } + if v.len() > max_len { + s.push_str(&v[..max_len]); + s.push('…'); + } else { + s.push_str(v); + } + } + s +} + #[derive(thiserror::Error, Debug)] pub enum Error { - #[error("The --config and the connection parameters cannot be used together")] - ConfigAndConnectionsError, + #[error("The --config and the connection parameters cannot be used together. 
Please remove unsupported parameters '{}'", elide_vec(.0, 3, 15))] + ConfigAndConnectionsError(Vec), #[error("Unable to bind to {1}: {0}")] BindingError(io::Error, String), @@ -38,4 +60,7 @@ pub enum Error { #[error("{0}")] SpriteError(#[from] SpriteError), + + #[error("{0}")] + FontError(#[from] FontError), } diff --git a/martin/src/utils/mod.rs b/martin/src/utils/mod.rs index 43aff165d..81ab8ec9f 100644 --- a/martin/src/utils/mod.rs +++ b/martin/src/utils/mod.rs @@ -1,9 +1,11 @@ +mod cfg_containers; mod error; mod id_resolver; -mod one_or_many; mod utilities; +mod xyz; +pub use cfg_containers::{OptBoolObj, OptOneMany}; pub use error::*; pub use id_resolver::IdResolver; -pub use one_or_many::OneOrMany; pub use utilities::*; +pub use xyz::Xyz; diff --git a/martin/src/utils/one_or_many.rs b/martin/src/utils/one_or_many.rs deleted file mode 100644 index c7cdd7808..000000000 --- a/martin/src/utils/one_or_many.rs +++ /dev/null @@ -1,95 +0,0 @@ -use std::vec::IntoIter; - -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum OneOrMany { - One(T), - Many(Vec), -} - -impl IntoIterator for OneOrMany { - type Item = T; - type IntoIter = IntoIter; - - fn into_iter(self) -> Self::IntoIter { - match self { - Self::One(v) => vec![v].into_iter(), - Self::Many(v) => v.into_iter(), - } - } -} - -impl OneOrMany { - pub fn new_opt>(iter: I) -> Option { - let mut iter = iter.into_iter(); - match (iter.next(), iter.next()) { - (Some(first), Some(second)) => { - let mut vec = Vec::with_capacity(iter.size_hint().0 + 2); - vec.push(first); - vec.push(second); - vec.extend(iter); - Some(Self::Many(vec)) - } - (Some(first), None) => Some(Self::One(first)), - (None, _) => None, - } - } - - pub fn is_empty(&self) -> bool { - match self { - Self::One(_) => false, - Self::Many(v) => v.is_empty(), - } - } - - pub fn iter(&self) -> impl Iterator { - match self { - OneOrMany::Many(v) => v.iter(), - OneOrMany::One(v) => 
std::slice::from_ref(v).iter(), - } - } - - pub fn iter_mut(&mut self) -> impl Iterator { - match self { - Self::Many(v) => v.iter_mut(), - Self::One(v) => std::slice::from_mut(v).iter_mut(), - } - } - - pub fn as_slice(&self) -> &[T] { - match self { - Self::One(item) => std::slice::from_ref(item), - Self::Many(v) => v.as_slice(), - } - } -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::OneOrMany::{Many, One}; - - #[test] - fn test_one_or_many() { - let mut one = One(1); - let mut many = Many(vec![1, 2, 3]); - - assert_eq!(OneOrMany::new_opt(vec![1, 2, 3]), Some(Many(vec![1, 2, 3]))); - assert_eq!(OneOrMany::new_opt(vec![1]), Some(One(1))); - assert_eq!(OneOrMany::new_opt(Vec::::new()), None); - - assert_eq!(one.iter_mut().collect::>(), vec![&1]); - assert_eq!(many.iter_mut().collect::>(), vec![&1, &2, &3]); - - assert_eq!(one.iter().collect::>(), vec![&1]); - assert_eq!(many.iter().collect::>(), vec![&1, &2, &3]); - - assert_eq!(one.as_slice(), &[1]); - assert_eq!(many.as_slice(), &[1, 2, 3]); - - assert_eq!(one.into_iter().collect::>(), vec![1]); - assert_eq!(many.into_iter().collect::>(), vec![1, 2, 3]); - } -} diff --git a/martin/src/utils/utilities.rs b/martin/src/utils/utilities.rs index c7c67d2ed..a8eab5b21 100644 --- a/martin/src/utils/utilities.rs +++ b/martin/src/utils/utilities.rs @@ -1,23 +1,13 @@ use std::collections::{BTreeMap, HashMap}; +use std::future::Future; use std::io::{Read as _, Write as _}; +use std::time::Duration; use flate2::read::GzDecoder; use flate2::write::GzEncoder; -use serde::{Deserialize, Serialize, Serializer}; - -#[must_use] -pub fn is_valid_zoom(zoom: u8, minzoom: Option, maxzoom: Option) -> bool { - minzoom.map_or(true, |minzoom| zoom >= minzoom) - && maxzoom.map_or(true, |maxzoom| zoom <= maxzoom) -} - -/// A serde helper to store a boolean as an object. 
-#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)] -#[serde(untagged)] -pub enum BoolOrObject { - Bool(bool), - Object(T), -} +use futures::pin_mut; +use serde::{Serialize, Serializer}; +use tokio::time::timeout; /// Sort an optional hashmap by key, case-insensitive first, then case-sensitive pub fn sorted_opt_map( @@ -28,9 +18,22 @@ pub fn sorted_opt_map( } pub fn sorted_btree_map(value: &HashMap) -> BTreeMap<&K, &V> { - let mut items: Vec<(_, _)> = value.iter().collect(); - items.sort_by(|a, b| a.0.cmp(b.0)); - BTreeMap::from_iter(items) + value.iter().collect() +} + +#[cfg(test)] +pub fn sorted_opt_set( + value: &Option>, + serializer: S, +) -> Result { + value + .as_ref() + .map(|v| { + let mut v: Vec<_> = v.iter().collect(); + v.sort(); + v + }) + .serialize(serializer) } pub fn decode_gzip(data: &[u8]) -> Result, std::io::Error> { @@ -58,3 +61,17 @@ pub fn encode_brotli(data: &[u8]) -> Result, std::io::Error> { encoder.write_all(data)?; Ok(encoder.into_inner()) } + +pub async fn on_slow( + future: impl Future, + duration: Duration, + fn_on_slow: S, +) -> T { + pin_mut!(future); + if let Ok(result) = timeout(duration, &mut future).await { + result + } else { + fn_on_slow(); + future.await + } +} diff --git a/martin/src/utils/xyz.rs b/martin/src/utils/xyz.rs new file mode 100644 index 000000000..599ebd5e6 --- /dev/null +++ b/martin/src/utils/xyz.rs @@ -0,0 +1,18 @@ +use std::fmt::{Display, Formatter}; + +#[derive(Debug, Copy, Clone)] +pub struct Xyz { + pub z: u8, + pub x: u32, + pub y: u32, +} + +impl Display for Xyz { + fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + if f.alternate() { + write!(f, "{}/{}/{}", self.z, self.x, self.y) + } else { + write!(f, "{},{},{}", self.z, self.x, self.y) + } + } +} diff --git a/tests/mb_server_test.rs b/martin/tests/mb_server_test.rs similarity index 82% rename from tests/mb_server_test.rs rename to martin/tests/mb_server_test.rs index 912c92a0f..046920f24 100644 --- a/tests/mb_server_test.rs +++ 
b/martin/tests/mb_server_test.rs @@ -2,8 +2,8 @@ use actix_web::http::header::{ACCEPT_ENCODING, CONTENT_ENCODING, CONTENT_TYPE}; use actix_web::test::{call_service, read_body, read_body_json, TestRequest}; use ctor::ctor; use indoc::indoc; +use insta::assert_yaml_snapshot; use martin::decode_gzip; -use martin::srv::IndexEntry; use tilejson::TileJSON; pub mod utils; @@ -16,11 +16,13 @@ fn init() { macro_rules! create_app { ($sources:expr) => {{ - let sources = mock_sources(mock_cfg($sources)).await.0; - let state = crate::utils::mock_app_data(sources).await; + let state = mock_sources(mock_cfg($sources)).await.0; ::actix_web::test::init_service( ::actix_web::App::new() - .app_data(state) + .app_data(actix_web::web::Data::new( + ::martin::srv::Catalog::new(&state).unwrap(), + )) + .app_data(actix_web::web::Data::new(state.tiles)) .configure(::martin::srv::router), ) .await @@ -34,10 +36,10 @@ fn test_get(path: &str) -> TestRequest { const CONFIG: &str = indoc! {" mbtiles: sources: - m_json: tests/fixtures/files/json.mbtiles - m_mvt: tests/fixtures/files/world_cities.mbtiles - m_raw_mvt: tests/fixtures/files/uncompressed_mvt.mbtiles - m_webp: tests/fixtures/files/webp.mbtiles + m_json: ../tests/fixtures/mbtiles/json.mbtiles + m_mvt: ../tests/fixtures/mbtiles/world_cities.mbtiles + m_raw_mvt: ../tests/fixtures/mbtiles/uncompressed_mvt.mbtiles + m_webp: ../tests/fixtures/mbtiles/webp.mbtiles "}; #[actix_rt::test] @@ -47,11 +49,28 @@ async fn mbt_get_catalog() { let req = test_get("/catalog").to_request(); let response = call_service(&app, req).await; assert!(response.status().is_success()); - let body = read_body(response).await; - let sources: Vec = serde_json::from_slice(&body).unwrap(); - assert_eq!(sources.iter().filter(|v| v.id == "m_mvt").count(), 1); - assert_eq!(sources.iter().filter(|v| v.id == "m_webp").count(), 1); - assert_eq!(sources.iter().filter(|v| v.id == "m_raw_mvt").count(), 1); + let body: serde_json::Value = read_body_json(response).await; + 
assert_yaml_snapshot!(body, @r###" + --- + tiles: + m_json: + content_type: application/json + name: Dummy json data + m_mvt: + content_type: application/x-protobuf + content_encoding: gzip + name: Major cities from Natural Earth data + description: Major cities from Natural Earth data + m_raw_mvt: + content_type: application/x-protobuf + name: Major cities from Natural Earth data + description: Major cities from Natural Earth data + m_webp: + content_type: image/webp + name: ne2sr + sprites: {} + fonts: {} + "###); } #[actix_rt::test] @@ -62,10 +81,28 @@ async fn mbt_get_catalog_gzip() { let response = call_service(&app, req).await; assert!(response.status().is_success()); let body = decode_gzip(&read_body(response).await).unwrap(); - let sources: Vec = serde_json::from_slice(&body).unwrap(); - assert_eq!(sources.iter().filter(|v| v.id == "m_mvt").count(), 1); - assert_eq!(sources.iter().filter(|v| v.id == "m_webp").count(), 1); - assert_eq!(sources.iter().filter(|v| v.id == "m_raw_mvt").count(), 1); + let body: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert_yaml_snapshot!(body, @r###" + --- + tiles: + m_json: + content_type: application/json + name: Dummy json data + m_mvt: + content_type: application/x-protobuf + content_encoding: gzip + name: Major cities from Natural Earth data + description: Major cities from Natural Earth data + m_raw_mvt: + content_type: application/x-protobuf + name: Major cities from Natural Earth data + description: Major cities from Natural Earth data + m_webp: + content_type: image/webp + name: ne2sr + sprites: {} + fonts: {} + "###); } #[actix_rt::test] diff --git a/tests/pg_function_source_test.rs b/martin/tests/pg_function_source_test.rs similarity index 68% rename from tests/pg_function_source_test.rs rename to martin/tests/pg_function_source_test.rs index 2db3a3791..b0940a109 100644 --- a/tests/pg_function_source_test.rs +++ b/martin/tests/pg_function_source_test.rs @@ -1,6 +1,6 @@ use ctor::ctor; use 
indoc::indoc; -use itertools::Itertools; +use insta::assert_yaml_snapshot; use martin::Xyz; pub mod utils; @@ -14,18 +14,15 @@ fn init() { #[actix_rt::test] async fn function_source_tilejson() { let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await; - assert_eq!( - source(&mock, "function_zxy_query").get_tilejson(), - serde_json::from_str(indoc! {r#" -{ - "name": "function_zxy_query", - "description": "public.function_zxy_query", - "tilejson": "3.0.0", - "tiles": [] -} - "#}) - .unwrap() - ); + let tj = source(&mock, "function_zxy_query").get_tilejson(); + assert_yaml_snapshot!(tj, @r###" + --- + tilejson: 3.0.0 + tiles: [] + name: function_zxy_query + foo: + bar: foo + "###); } #[actix_rt::test] @@ -55,9 +52,11 @@ async fn function_source_schemas() { functions: from_schemas: MixedCase "}); - let sources = mock_sources(cfg).await.0; - assert_eq!( - sources.keys().sorted().collect::>(), - vec!["function_Mixed_Name"], - ); + let sources = mock_sources(cfg).await.0.tiles; + assert_yaml_snapshot!(sources.get_catalog(), @r###" + --- + function_Mixed_Name: + content_type: application/x-protobuf + description: a function source with MixedCase name + "###); } diff --git a/tests/pg_server_test.rs b/martin/tests/pg_server_test.rs similarity index 85% rename from tests/pg_server_test.rs rename to martin/tests/pg_server_test.rs index 1d65869c3..3a48728a3 100644 --- a/tests/pg_server_test.rs +++ b/martin/tests/pg_server_test.rs @@ -3,8 +3,8 @@ use actix_web::http::StatusCode; use actix_web::test::{call_and_read_body_json, call_service, read_body, TestRequest}; use ctor::ctor; use indoc::indoc; -use martin::srv::IndexEntry; -use martin::OneOrMany; +use insta::assert_yaml_snapshot; +use martin::OptOneMany; use tilejson::TileJSON; pub mod utils; @@ -18,11 +18,13 @@ fn init() { macro_rules! 
create_app { ($sources:expr) => {{ let cfg = mock_cfg(indoc::indoc!($sources)); - let sources = mock_sources(cfg).await.0; - let state = crate::utils::mock_app_data(sources).await; + let state = mock_sources(cfg).await.0; ::actix_web::test::init_service( ::actix_web::App::new() - .app_data(state) + .app_data(actix_web::web::Data::new( + ::martin::srv::Catalog::new(&state).unwrap(), + )) + .app_data(actix_web::web::Data::new(state.tiles)) .configure(::martin::srv::router), ) .await @@ -44,16 +46,77 @@ postgres: let response = call_service(&app, req).await; assert!(response.status().is_success()); let body = read_body(response).await; - let sources: Vec = serde_json::from_slice(&body).unwrap(); - - let expected = "table_source"; - assert_eq!(sources.iter().filter(|v| v.id == expected).count(), 1); - - let expected = "function_zxy_query"; - assert_eq!(sources.iter().filter(|v| v.id == expected).count(), 1); - - let expected = "function_zxy_query_jsonb"; - assert_eq!(sources.iter().filter(|v| v.id == expected).count(), 1); + let body: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert_yaml_snapshot!(body, @r###" + --- + tiles: + MixPoints: + content_type: application/x-protobuf + description: a description from comment on table + auto_table: + content_type: application/x-protobuf + description: autodetect.auto_table.geom + bigint_table: + content_type: application/x-protobuf + description: autodetect.bigint_table.geom + function_Mixed_Name: + content_type: application/x-protobuf + description: a function source with MixedCase name + function_null: + content_type: application/x-protobuf + description: public.function_null + function_null_row: + content_type: application/x-protobuf + description: public.function_null_row + function_null_row2: + content_type: application/x-protobuf + description: public.function_null_row2 + function_zoom_xy: + content_type: application/x-protobuf + description: public.function_zoom_xy + function_zxy: + content_type: 
application/x-protobuf + description: public.function_zxy + function_zxy2: + content_type: application/x-protobuf + description: public.function_zxy2 + function_zxy_query: + content_type: application/x-protobuf + function_zxy_query_jsonb: + content_type: application/x-protobuf + description: public.function_zxy_query_jsonb + function_zxy_query_test: + content_type: application/x-protobuf + description: public.function_zxy_query_test + function_zxy_row: + content_type: application/x-protobuf + description: public.function_zxy_row + function_zxy_row_key: + content_type: application/x-protobuf + description: public.function_zxy_row_key + points1: + content_type: application/x-protobuf + description: public.points1.geom + points1_vw: + content_type: application/x-protobuf + description: public.points1_vw.geom + points2: + content_type: application/x-protobuf + description: public.points2.geom + points3857: + content_type: application/x-protobuf + description: public.points3857.geom + table_source: + content_type: application/x-protobuf + table_source_multiple_geom: + content_type: application/x-protobuf + description: public.table_source_multiple_geom.geom1 + table_source_multiple_geom.1: + content_type: application/x-protobuf + description: public.table_source_multiple_geom.geom2 + sprites: {} + fonts: {} + "###); } #[actix_rt::test] @@ -877,7 +940,6 @@ postgres: let req = test_get("/function_zxy_query_test/0/0/0"); let response = call_service(&app, req).await; - println!("response.status = {:?}", response.status()); assert!(response.status().is_server_error()); let req = test_get("/function_zxy_query_test/0/0/0?token=martin"); @@ -948,49 +1010,92 @@ tables: let src = table(&mock, "no_id"); assert_eq!(src.id_column, None); assert!(matches!(&src.properties, Some(v) if v.len() == 1)); - assert_eq!( - source(&mock, "no_id").get_tilejson(), - serde_json::from_str(indoc! 
{r#" -{ - "name": "no_id", - "description": "MixedCase.MixPoints.Geom", - "tilejson": "3.0.0", - "tiles": [], - "vector_layers": [ - { - "id": "no_id", - "fields": {"TABLE": "text"} - } - ], - "bounds": [-180.0, -90.0, 180.0, 90.0] -} - "#}) - .unwrap() - ); - - let src = table(&mock, "id_only"); - assert_eq!(src.id_column, some("giD")); - assert!(matches!(&src.properties, Some(v) if v.len() == 1)); + let tj = source(&mock, "no_id").get_tilejson(); + assert_yaml_snapshot!(tj, @r###" + --- + tilejson: 3.0.0 + tiles: [] + vector_layers: + - id: no_id + fields: + TABLE: text + bounds: + - -180 + - -90 + - 180 + - 90 + description: MixedCase.MixPoints.Geom + name: no_id + "###); + + assert_yaml_snapshot!(table(&mock, "id_only"), @r###" + --- + schema: MixedCase + table: MixPoints + srid: 4326 + geometry_column: Geom + id_column: giD + bounds: + - -180 + - -90 + - 180 + - 90 + geometry_type: POINT + properties: + TABLE: text + "###); - let src = table(&mock, "id_and_prop"); - assert_eq!(src.id_column, some("giD")); - assert!(matches!(&src.properties, Some(v) if v.len() == 2)); + assert_yaml_snapshot!(table(&mock, "id_and_prop"), @r###" + --- + schema: MixedCase + table: MixPoints + srid: 4326 + geometry_column: Geom + id_column: giD + bounds: + - -180 + - -90 + - 180 + - 90 + geometry_type: POINT + properties: + TABLE: text + giD: int4 + "###); - let src = table(&mock, "prop_only"); - assert_eq!(src.id_column, None); - assert!(matches!(&src.properties, Some(v) if v.len() == 2)); + assert_yaml_snapshot!(table(&mock, "prop_only"), @r###" + --- + schema: MixedCase + table: MixPoints + srid: 4326 + geometry_column: Geom + bounds: + - -180 + - -90 + - 180 + - 90 + geometry_type: POINT + properties: + TABLE: text + giD: int4 + "###); // -------------------------------------------- - let state = mock_app_data(mock.0).await; + let state = mock_sources(cfg.clone()).await.0; let app = ::actix_web::test::init_service( ::actix_web::App::new() - .app_data(state) + 
.app_data(actix_web::web::Data::new( + ::martin::srv::Catalog::new(&state).unwrap(), + )) + .app_data(actix_web::web::Data::new(state.tiles)) .configure(::martin::srv::router), ) .await; - let OneOrMany::One(cfg) = cfg.postgres.unwrap() else { panic!() }; + let OptOneMany::One(cfg) = cfg.postgres else { + panic!() + }; for (name, _) in cfg.tables.unwrap_or_default() { let req = test_get(format!("/{name}/0/0/0").as_str()); let response = call_service(&app, req).await; diff --git a/martin/tests/pg_table_source_test.rs b/martin/tests/pg_table_source_test.rs new file mode 100644 index 000000000..6b1f28a14 --- /dev/null +++ b/martin/tests/pg_table_source_test.rs @@ -0,0 +1,185 @@ +use ctor::ctor; +use indoc::indoc; +use insta::assert_yaml_snapshot; +use martin::Xyz; + +pub mod utils; +pub use utils::*; + +#[ctor] +fn init() { + let _ = env_logger::builder().is_test(true).try_init(); +} + +#[actix_rt::test] +async fn table_source() { + let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await; + assert_yaml_snapshot!(mock.0.tiles.get_catalog(), @r###" + --- + MixPoints: + content_type: application/x-protobuf + description: a description from comment on table + auto_table: + content_type: application/x-protobuf + description: autodetect.auto_table.geom + bigint_table: + content_type: application/x-protobuf + description: autodetect.bigint_table.geom + function_Mixed_Name: + content_type: application/x-protobuf + description: a function source with MixedCase name + function_null: + content_type: application/x-protobuf + description: public.function_null + function_null_row: + content_type: application/x-protobuf + description: public.function_null_row + function_null_row2: + content_type: application/x-protobuf + description: public.function_null_row2 + function_zoom_xy: + content_type: application/x-protobuf + description: public.function_zoom_xy + function_zxy: + content_type: application/x-protobuf + description: public.function_zxy + function_zxy2: 
+ content_type: application/x-protobuf + description: public.function_zxy2 + function_zxy_query: + content_type: application/x-protobuf + function_zxy_query_jsonb: + content_type: application/x-protobuf + description: public.function_zxy_query_jsonb + function_zxy_query_test: + content_type: application/x-protobuf + description: public.function_zxy_query_test + function_zxy_row: + content_type: application/x-protobuf + description: public.function_zxy_row + function_zxy_row_key: + content_type: application/x-protobuf + description: public.function_zxy_row_key + points1: + content_type: application/x-protobuf + description: public.points1.geom + points1_vw: + content_type: application/x-protobuf + description: public.points1_vw.geom + points2: + content_type: application/x-protobuf + description: public.points2.geom + points3857: + content_type: application/x-protobuf + description: public.points3857.geom + table_source: + content_type: application/x-protobuf + table_source_multiple_geom: + content_type: application/x-protobuf + description: public.table_source_multiple_geom.geom1 + table_source_multiple_geom.1: + content_type: application/x-protobuf + description: public.table_source_multiple_geom.geom2 + "###); + + let source = table(&mock, "table_source"); + assert_yaml_snapshot!(source, @r###" + --- + schema: public + table: table_source + srid: 4326 + geometry_column: geom + bounds: + - -2 + - -1 + - 142.84131509869133 + - 45 + geometry_type: GEOMETRY + properties: + gid: int4 + "###); +} + +#[actix_rt::test] +async fn tables_tilejson() { + let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await; + let tj = source(&mock, "table_source").get_tilejson(); + assert_yaml_snapshot!(tj, @r###" + --- + tilejson: 3.0.0 + tiles: [] + vector_layers: + - id: table_source + fields: + gid: int4 + bounds: + - -2 + - -1 + - 142.84131509869133 + - 45 + name: table_source + foo: + bar: foo + "###); +} + +#[actix_rt::test] +async fn tables_tile_ok() { + let 
mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await; + let tile = source(&mock, "table_source") + .get_tile(&Xyz { z: 0, x: 0, y: 0 }, &None) + .await + .unwrap(); + + assert!(!tile.is_empty()); +} + +#[actix_rt::test] +async fn tables_srid_ok() { + let mock = mock_sources(mock_pgcfg(indoc! {" + connection_string: $DATABASE_URL + default_srid: 900913 + "})) + .await; + + let source = table(&mock, "points1"); + assert_eq!(source.srid, 4326); + + let source = table(&mock, "points2"); + assert_eq!(source.srid, 4326); + + let source = table(&mock, "points3857"); + assert_eq!(source.srid, 3857); + + let source = table(&mock, "points_empty_srid"); + assert_eq!(source.srid, 900_913); +} + +#[actix_rt::test] +async fn tables_multiple_geom_ok() { + let mock = mock_sources(mock_pgcfg("connection_string: $DATABASE_URL")).await; + + let source = table(&mock, "table_source_multiple_geom"); + assert_eq!(source.geometry_column, "geom1"); + + let source = table(&mock, "table_source_multiple_geom.1"); + assert_eq!(source.geometry_column, "geom2"); +} + +#[actix_rt::test] +async fn table_source_schemas() { + let cfg = mock_pgcfg(indoc! 
{" + connection_string: $DATABASE_URL + auto_publish: + tables: + from_schemas: MixedCase + functions: false + "}); + let sources = mock_sources(cfg).await.0; + assert_yaml_snapshot!(sources.tiles.get_catalog(), @r###" + --- + MixPoints: + content_type: application/x-protobuf + description: a description from comment on table + "###); +} diff --git a/tests/pmt_server_test.rs b/martin/tests/pmt_server_test.rs similarity index 78% rename from tests/pmt_server_test.rs rename to martin/tests/pmt_server_test.rs index bcb81da41..b9f89628d 100644 --- a/tests/pmt_server_test.rs +++ b/martin/tests/pmt_server_test.rs @@ -2,8 +2,8 @@ use actix_web::http::header::{ACCEPT_ENCODING, CONTENT_ENCODING, CONTENT_TYPE}; use actix_web::test::{call_service, read_body, read_body_json, TestRequest}; use ctor::ctor; use indoc::indoc; +use insta::assert_yaml_snapshot; use martin::decode_gzip; -use martin::srv::IndexEntry; use tilejson::TileJSON; pub mod utils; @@ -16,11 +16,13 @@ fn init() { macro_rules! create_app { ($sources:expr) => {{ - let sources = mock_sources(mock_cfg($sources)).await.0; - let state = crate::utils::mock_app_data(sources).await; + let state = mock_sources(mock_cfg($sources)).await.0; ::actix_web::test::init_service( ::actix_web::App::new() - .app_data(state) + .app_data(actix_web::web::Data::new( + ::martin::srv::Catalog::new(&state).unwrap(), + )) + .app_data(actix_web::web::Data::new(state.tiles)) .configure(::martin::srv::router), ) .await @@ -34,22 +36,26 @@ fn test_get(path: &str) -> TestRequest { const CONFIG: &str = indoc! {" pmtiles: sources: - p_png: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles + p_png: ../tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles "}; #[actix_rt::test] async fn pmt_get_catalog() { - let path = "pmtiles: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles"; + let path = "pmtiles: ../tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles"; let app = create_app! 
{ path }; let req = test_get("/catalog").to_request(); let response = call_service(&app, req).await; assert!(response.status().is_success()); - let body = read_body(response).await; - let sources: Vec = serde_json::from_slice(&body).unwrap(); - - let expected = "stamen_toner__raster_CC-BY-ODbL_z3"; - assert_eq!(sources.iter().filter(|v| v.id == expected).count(), 1); + let body: serde_json::Value = read_body_json(response).await; + assert_yaml_snapshot!(body, @r###" + --- + tiles: + stamen_toner__raster_CC-BY-ODbL_z3: + content_type: image/png + sprites: {} + fonts: {} + "###); } #[actix_rt::test] @@ -60,8 +66,15 @@ async fn pmt_get_catalog_gzip() { let response = call_service(&app, req).await; assert!(response.status().is_success()); let body = decode_gzip(&read_body(response).await).unwrap(); - let sources: Vec = serde_json::from_slice(&body).unwrap(); - assert_eq!(sources.iter().filter(|v| v.id == "p_png").count(), 1); + let body: serde_json::Value = serde_json::from_slice(&body).unwrap(); + assert_yaml_snapshot!(body, @r###" + --- + tiles: + p_png: + content_type: image/png + sprites: {} + fonts: {} + "###); } #[actix_rt::test] diff --git a/tests/utils/mod.rs b/martin/tests/utils/mod.rs similarity index 81% rename from tests/utils/mod.rs rename to martin/tests/utils/mod.rs index d15588ea7..e61dae3d0 100644 --- a/tests/utils/mod.rs +++ b/martin/tests/utils/mod.rs @@ -4,10 +4,8 @@ mod pg_utils; -use actix_web::web::Data; use log::warn; -use martin::srv::AppState; -use martin::{Config, Sources}; +use martin::Config; pub use pg_utils::*; #[path = "../../src/utils/test_utils.rs"] @@ -15,10 +13,6 @@ mod test_utils; #[allow(clippy::wildcard_imports)] pub use test_utils::*; -pub async fn mock_app_data(sources: Sources) -> Data { - Data::new(sources) -} - #[must_use] pub fn mock_cfg(yaml: &str) -> Config { let env = if let Ok(db_url) = std::env::var("DATABASE_URL") { diff --git a/tests/utils/pg_utils.rs b/martin/tests/utils/pg_utils.rs similarity index 87% rename from 
tests/utils/pg_utils.rs rename to martin/tests/utils/pg_utils.rs index b5c729d34..27b689420 100644 --- a/tests/utils/pg_utils.rs +++ b/martin/tests/utils/pg_utils.rs @@ -1,7 +1,7 @@ use indoc::formatdoc; pub use martin::args::Env; use martin::pg::TableInfo; -use martin::{Config, IdResolver, Source, Sources}; +use martin::{Config, IdResolver, ServerState, Source}; use crate::mock_cfg; @@ -10,7 +10,7 @@ use crate::mock_cfg; // Each function should allow dead_code as they might not be used by a specific test file. // -pub type MockSource = (Sources, Config); +pub type MockSource = (ServerState, Config); #[allow(dead_code)] #[must_use] @@ -34,8 +34,6 @@ pub fn table<'a>(mock: &'a MockSource, name: &str) -> &'a TableInfo { let (_, config) = mock; let vals: Vec<&TableInfo> = config .postgres - .as_ref() - .unwrap() .iter() .flat_map(|v| v.tables.iter().map(|vv| vv.get(name))) .flatten() @@ -48,5 +46,5 @@ pub fn table<'a>(mock: &'a MockSource, name: &str) -> &'a TableInfo { #[must_use] pub fn source<'a>(mock: &'a MockSource, name: &str) -> &'a dyn Source { let (sources, _) = mock; - sources.get(name).unwrap().as_ref() + sources.tiles.get_source(name).unwrap() } diff --git a/martin-mbtiles/.env b/mbtiles/.env similarity index 100% rename from martin-mbtiles/.env rename to mbtiles/.env diff --git a/martin-mbtiles/.sqlx/query-0a4540e8c33c71222a68ff5ecc1a167b406de9961ac3cc69649c6152a6d7a9b7.json b/mbtiles/.sqlx/query-0a4540e8c33c71222a68ff5ecc1a167b406de9961ac3cc69649c6152a6d7a9b7.json similarity index 100% rename from martin-mbtiles/.sqlx/query-0a4540e8c33c71222a68ff5ecc1a167b406de9961ac3cc69649c6152a6d7a9b7.json rename to mbtiles/.sqlx/query-0a4540e8c33c71222a68ff5ecc1a167b406de9961ac3cc69649c6152a6d7a9b7.json diff --git a/martin-mbtiles/.sqlx/query-176e99c6945b0789119d0d21a99de564de47dde1d588f17e68ec58115ac73a39.json b/mbtiles/.sqlx/query-176e99c6945b0789119d0d21a99de564de47dde1d588f17e68ec58115ac73a39.json similarity index 100% rename from 
martin-mbtiles/.sqlx/query-176e99c6945b0789119d0d21a99de564de47dde1d588f17e68ec58115ac73a39.json rename to mbtiles/.sqlx/query-176e99c6945b0789119d0d21a99de564de47dde1d588f17e68ec58115ac73a39.json diff --git a/martin-mbtiles/.sqlx/query-386a375cf65c3e5aef51deffc99d23bd852ba445c1058aed380fe83bed618c29.json b/mbtiles/.sqlx/query-386a375cf65c3e5aef51deffc99d23bd852ba445c1058aed380fe83bed618c29.json similarity index 100% rename from martin-mbtiles/.sqlx/query-386a375cf65c3e5aef51deffc99d23bd852ba445c1058aed380fe83bed618c29.json rename to mbtiles/.sqlx/query-386a375cf65c3e5aef51deffc99d23bd852ba445c1058aed380fe83bed618c29.json diff --git a/mbtiles/.sqlx/query-428a035a55a07cbb9daac42c3ab05f2a7999788167f41c685af3ca6f5a1359f4.json b/mbtiles/.sqlx/query-428a035a55a07cbb9daac42c3ab05f2a7999788167f41c685af3ca6f5a1359f4.json new file mode 100644 index 000000000..83f5d8a66 --- /dev/null +++ b/mbtiles/.sqlx/query-428a035a55a07cbb9daac42c3ab05f2a7999788167f41c685af3ca6f5a1359f4.json @@ -0,0 +1,12 @@ +{ + "db_name": "SQLite", + "query": "PRAGMA encoding = 'UTF-8'", + "describe": { + "columns": [], + "parameters": { + "Right": 0 + }, + "nullable": [] + }, + "hash": "428a035a55a07cbb9daac42c3ab05f2a7999788167f41c685af3ca6f5a1359f4" +} diff --git a/martin-mbtiles/.sqlx/query-4d952966a8d8a030d2467c0701a6e16068c9897dd25d0ebd32929db9960596b4.json b/mbtiles/.sqlx/query-4d952966a8d8a030d2467c0701a6e16068c9897dd25d0ebd32929db9960596b4.json similarity index 100% rename from martin-mbtiles/.sqlx/query-4d952966a8d8a030d2467c0701a6e16068c9897dd25d0ebd32929db9960596b4.json rename to mbtiles/.sqlx/query-4d952966a8d8a030d2467c0701a6e16068c9897dd25d0ebd32929db9960596b4.json diff --git a/martin-mbtiles/.sqlx/query-5b298df51dccbf0d8a22433a99febc59c27dbf204d09a9c1fb0b3bf9aaad284b.json b/mbtiles/.sqlx/query-5b298df51dccbf0d8a22433a99febc59c27dbf204d09a9c1fb0b3bf9aaad284b.json similarity index 100% rename from 
martin-mbtiles/.sqlx/query-5b298df51dccbf0d8a22433a99febc59c27dbf204d09a9c1fb0b3bf9aaad284b.json rename to mbtiles/.sqlx/query-5b298df51dccbf0d8a22433a99febc59c27dbf204d09a9c1fb0b3bf9aaad284b.json diff --git a/martin-mbtiles/.sqlx/query-60264fa07915878b3f7ba0067f48c3a379e96acbdf5fc52d14e29bc726fefab7.json b/mbtiles/.sqlx/query-60264fa07915878b3f7ba0067f48c3a379e96acbdf5fc52d14e29bc726fefab7.json similarity index 100% rename from martin-mbtiles/.sqlx/query-60264fa07915878b3f7ba0067f48c3a379e96acbdf5fc52d14e29bc726fefab7.json rename to mbtiles/.sqlx/query-60264fa07915878b3f7ba0067f48c3a379e96acbdf5fc52d14e29bc726fefab7.json diff --git a/mbtiles/.sqlx/query-7341bfc10beb4719811556a57ae8098085994c8fba93e0293359afd43079c50c.json b/mbtiles/.sqlx/query-7341bfc10beb4719811556a57ae8098085994c8fba93e0293359afd43079c50c.json new file mode 100644 index 000000000..2b9d7474d --- /dev/null +++ b/mbtiles/.sqlx/query-7341bfc10beb4719811556a57ae8098085994c8fba93e0293359afd43079c50c.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "SELECT (\n -- Has a 'tiles' table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'tiles'\n AND type = 'table'\n --\n ) AND (\n -- 'tiles' table's columns and their types are as expected:\n -- 4 columns (zoom_level, tile_column, tile_row, tile_data).\n -- The order is not important\n SELECT COUNT(*) = 4\n FROM pragma_table_info('tiles')\n WHERE ((name = 'zoom_level' AND type = 'INTEGER')\n OR (name = 'tile_column' AND type = 'INTEGER')\n OR (name = 'tile_row' AND type = 'INTEGER')\n OR (name = 'tile_data' AND type = 'BLOB'))\n --\n ) as is_valid;", + "describe": { + "columns": [ + { + "name": "is_valid", + "ordinal": 0, + "type_info": "Int" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + null + ] + }, + "hash": "7341bfc10beb4719811556a57ae8098085994c8fba93e0293359afd43079c50c" +} diff --git a/martin-mbtiles/.sqlx/query-748436831449877b242d6e167a2f8fe1b1e7b6fb87c4e04ad7406a2bbfd35bec.json 
b/mbtiles/.sqlx/query-748436831449877b242d6e167a2f8fe1b1e7b6fb87c4e04ad7406a2bbfd35bec.json similarity index 100% rename from martin-mbtiles/.sqlx/query-748436831449877b242d6e167a2f8fe1b1e7b6fb87c4e04ad7406a2bbfd35bec.json rename to mbtiles/.sqlx/query-748436831449877b242d6e167a2f8fe1b1e7b6fb87c4e04ad7406a2bbfd35bec.json diff --git a/mbtiles/.sqlx/query-77b2f46851c4e991230ec6a5d33aaca18373bbdd548a8378ae7fbeed351b4b87.json b/mbtiles/.sqlx/query-77b2f46851c4e991230ec6a5d33aaca18373bbdd548a8378ae7fbeed351b4b87.json new file mode 100644 index 000000000..acb0b2ecc --- /dev/null +++ b/mbtiles/.sqlx/query-77b2f46851c4e991230ec6a5d33aaca18373bbdd548a8378ae7fbeed351b4b87.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "SELECT (\n -- Has a 'tiles_with_hash' table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'tiles_with_hash'\n AND type = 'table'\n --\n ) as is_valid;", + "describe": { + "columns": [ + { + "name": "is_valid", + "ordinal": 0, + "type_info": "Int" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + null + ] + }, + "hash": "77b2f46851c4e991230ec6a5d33aaca18373bbdd548a8378ae7fbeed351b4b87" +} diff --git a/mbtiles/.sqlx/query-809e89c3b223e28c6716d405e13ba30fbf018805fe9ca2acd2b2e225183d1f13.json b/mbtiles/.sqlx/query-809e89c3b223e28c6716d405e13ba30fbf018805fe9ca2acd2b2e225183d1f13.json new file mode 100644 index 000000000..faf6b51f7 --- /dev/null +++ b/mbtiles/.sqlx/query-809e89c3b223e28c6716d405e13ba30fbf018805fe9ca2acd2b2e225183d1f13.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "SELECT (\n -- Has a 'map' table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'map'\n AND type = 'table'\n --\n ) AND (\n -- 'map' table's columns and their types are as expected:\n -- 4 columns (zoom_level, tile_column, tile_row, tile_id).\n -- The order is not important\n SELECT COUNT(*) = 4\n FROM pragma_table_info('map')\n WHERE ((name = 'zoom_level' AND type = 'INTEGER')\n OR (name = 'tile_column' AND type = 'INTEGER')\n OR 
(name = 'tile_row' AND type = 'INTEGER')\n OR (name = 'tile_id' AND type = 'TEXT'))\n --\n ) AND (\n -- Has a 'images' table\n SELECT COUNT(*) = 1\n FROM sqlite_master\n WHERE name = 'images'\n AND type = 'table'\n --\n ) AND (\n -- 'images' table's columns and their types are as expected:\n -- 2 columns (tile_id, tile_data).\n -- The order is not important\n SELECT COUNT(*) = 2\n FROM pragma_table_info('images')\n WHERE ((name = 'tile_id' AND type = 'TEXT')\n OR (name = 'tile_data' AND type = 'BLOB'))\n --\n ) AS is_valid;", + "describe": { + "columns": [ + { + "name": "is_valid", + "ordinal": 0, + "type_info": "Int" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + null + ] + }, + "hash": "809e89c3b223e28c6716d405e13ba30fbf018805fe9ca2acd2b2e225183d1f13" +} diff --git a/mbtiles/.sqlx/query-85b46712c445679053e768cc98b22ea61633c21afb45d3d2b9aeec068d72cce0.json b/mbtiles/.sqlx/query-85b46712c445679053e768cc98b22ea61633c21afb45d3d2b9aeec068d72cce0.json new file mode 100644 index 000000000..45fdc4f2a --- /dev/null +++ b/mbtiles/.sqlx/query-85b46712c445679053e768cc98b22ea61633c21afb45d3d2b9aeec068d72cce0.json @@ -0,0 +1,20 @@ +{ + "db_name": "SQLite", + "query": "SELECT (\n -- 'tiles_with_hash' table or view columns and their types are as expected:\n -- 5 columns (zoom_level, tile_column, tile_row, tile_data, tile_hash).\n -- The order is not important\n SELECT COUNT(*) = 5\n FROM pragma_table_info('tiles_with_hash')\n WHERE ((name = 'zoom_level' AND type = 'INTEGER')\n OR (name = 'tile_column' AND type = 'INTEGER')\n OR (name = 'tile_row' AND type = 'INTEGER')\n OR (name = 'tile_data' AND type = 'BLOB')\n OR (name = 'tile_hash' AND type = 'TEXT'))\n --\n ) as is_valid;", + "describe": { + "columns": [ + { + "name": "is_valid", + "ordinal": 0, + "type_info": "Int" + } + ], + "parameters": { + "Right": 0 + }, + "nullable": [ + null + ] + }, + "hash": "85b46712c445679053e768cc98b22ea61633c21afb45d3d2b9aeec068d72cce0" +} diff --git 
a/martin-mbtiles/.sqlx/query-c8ef3dc53f1f6fd80e266aab2bf007c66a1cc45bdfcdc38f93d6ba759125a9aa.json b/mbtiles/.sqlx/query-c8ef3dc53f1f6fd80e266aab2bf007c66a1cc45bdfcdc38f93d6ba759125a9aa.json similarity index 100% rename from martin-mbtiles/.sqlx/query-c8ef3dc53f1f6fd80e266aab2bf007c66a1cc45bdfcdc38f93d6ba759125a9aa.json rename to mbtiles/.sqlx/query-c8ef3dc53f1f6fd80e266aab2bf007c66a1cc45bdfcdc38f93d6ba759125a9aa.json diff --git a/martin-mbtiles/.sqlx/query-d6ac76a234c97d0dc1fc4331d8b2cd90903d5401f8f0956245e5163bedd23a4d.json b/mbtiles/.sqlx/query-d6ac76a234c97d0dc1fc4331d8b2cd90903d5401f8f0956245e5163bedd23a4d.json similarity index 100% rename from martin-mbtiles/.sqlx/query-d6ac76a234c97d0dc1fc4331d8b2cd90903d5401f8f0956245e5163bedd23a4d.json rename to mbtiles/.sqlx/query-d6ac76a234c97d0dc1fc4331d8b2cd90903d5401f8f0956245e5163bedd23a4d.json diff --git a/martin-mbtiles/.sqlx/query-f547ff198e3bb604550a3f191e4ad8c695c4c2350f294aefd210eccec603d905.json b/mbtiles/.sqlx/query-f547ff198e3bb604550a3f191e4ad8c695c4c2350f294aefd210eccec603d905.json similarity index 100% rename from martin-mbtiles/.sqlx/query-f547ff198e3bb604550a3f191e4ad8c695c4c2350f294aefd210eccec603d905.json rename to mbtiles/.sqlx/query-f547ff198e3bb604550a3f191e4ad8c695c4c2350f294aefd210eccec603d905.json diff --git a/martin-mbtiles/Cargo.toml b/mbtiles/Cargo.toml similarity index 69% rename from martin-mbtiles/Cargo.toml rename to mbtiles/Cargo.toml index 3392c25a9..c31ab35c6 100644 --- a/martin-mbtiles/Cargo.toml +++ b/mbtiles/Cargo.toml @@ -1,28 +1,27 @@ [package] -name = "martin-mbtiles" -version = "0.4.0" +name = "mbtiles" +version = "0.7.2" authors = ["Yuri Astrakhan ", "MapLibre contributors"] description = "A simple low-level MbTiles access and processing library, with some tile format detection and other relevant heuristics." 
keywords = ["mbtiles", "maps", "tiles", "mvt", "tilejson"] +categories = ["science::geo", "database"] edition.workspace = true license.workspace = true repository.workspace = true rust-version.workspace = true [features] -# TODO: Disable "cli" feature in default builds -default = ["cli", "native-tls"] -cli = ["dep:anyhow", "dep:clap", "dep:tokio"] -# One of the following two must be used -native-tls = ["sqlx/runtime-tokio-native-tls"] -rustls = ["sqlx/runtime-tokio-rustls"] +default = ["cli"] +cli = ["dep:anyhow", "dep:clap", "dep:env_logger", "dep:serde_yaml", "dep:tokio"] [dependencies] +enum-display.workspace = true futures.workspace = true log.workspace = true martin-tile-utils.workspace = true serde.workspace = true serde_json.workspace = true +sqlite-hashes.workspace = true sqlx.workspace = true thiserror.workspace = true tilejson.workspace = true @@ -30,13 +29,18 @@ tilejson.workspace = true # Bin dependencies anyhow = { workspace = true, optional = true } clap = { workspace = true, optional = true } -serde_yaml.workspace = true -sqlite-hashes.workspace = true +env_logger = { workspace = true, optional = true } +serde_yaml = { workspace = true, optional = true } tokio = { workspace = true, features = ["rt-multi-thread"], optional = true } [dev-dependencies] # For testing, might as well use the same async framework as the Martin itself actix-rt.workspace = true +ctor.workspace = true +env_logger.workspace = true +insta = { workspace = true, features = ["toml"] } +pretty_assertions.workspace = true +rstest.workspace = true [lib] path = "src/lib.rs" diff --git a/mbtiles/README.md b/mbtiles/README.md new file mode 100644 index 000000000..92e29a62a --- /dev/null +++ b/mbtiles/README.md @@ -0,0 +1,31 @@ +# mbtiles + +[![Book](https://img.shields.io/badge/docs-Book-informational)](https://maplibre.org/martin/50-tools.html) +[![docs.rs docs](https://docs.rs/mbtiles/badge.svg)](https://docs.rs/mbtiles) +[![Slack 
chat](https://img.shields.io/badge/Chat-on%20Slack-blueviolet)](https://slack.openstreetmap.us/) +[![GitHub](https://img.shields.io/badge/github-maplibre/martin-8da0cb?logo=github)](https://github.com/maplibre/martin) +[![crates.io version](https://img.shields.io/crates/v/mbtiles.svg)](https://crates.io/crates/mbtiles) +[![CI build](https://github.com/maplibre/martin/actions/workflows/ci.yml/badge.svg)](https://github.com/maplibre/martin/actions) + +A library to help tile servers like [Martin](https://maplibre.org/martin) work with [MBTiles](https://github.com/mapbox/mbtiles-spec) files. When using as a lib, you may want to disable default features (i.e. the unused "cli" feature). + +This crate also has a small utility that allows users to interact with the `*.mbtiles` files from the command line. See [tools](https://maplibre.org/martin/50-tools.html) documentation for more information. + +### Development + +Any changes to SQL commands require running of `just prepare-sqlite`. This will install `cargo sqlx` command if it is not already installed, and update the `./sqlx-data.json` file. + +## License + +Licensed under either of + +* Apache License, Version 2.0 ([LICENSE-APACHE](LICENSE-APACHE) or ) +* MIT license ([LICENSE-MIT](LICENSE-MIT) or ) + at your option. + +### Contribution + +Unless you explicitly state otherwise, any contribution intentionally +submitted for inclusion in the work by you, as defined in the +Apache-2.0 license, shall be dual licensed as above, without any +additional terms or conditions. 
diff --git a/martin-mbtiles/src/bin/main.rs b/mbtiles/src/bin/main.rs similarity index 71% rename from martin-mbtiles/src/bin/main.rs rename to mbtiles/src/bin/main.rs index 7d8721a05..9643e5f54 100644 --- a/martin-mbtiles/src/bin/main.rs +++ b/mbtiles/src/bin/main.rs @@ -1,12 +1,8 @@ use std::path::{Path, PathBuf}; -use anyhow::Result; use clap::{Parser, Subcommand}; -use martin_mbtiles::{ - apply_mbtiles_diff, copy_mbtiles_file, IntegrityCheckType, Mbtiles, TileCopierOptions, -}; -use sqlx::sqlite::SqliteConnectOptions; -use sqlx::{Connection, SqliteConnection}; +use log::error; +use mbtiles::{apply_patch, IntegrityCheckType, MbtResult, Mbtiles, MbtilesCopier}; #[derive(Parser, PartialEq, Eq, Debug)] #[command( @@ -50,10 +46,10 @@ enum Commands { }, /// Copy tiles from one mbtiles file to another. #[command(name = "copy")] - Copy(TileCopierOptions), + Copy(MbtilesCopier), /// Apply diff file generated from 'copy' command - #[command(name = "apply-diff")] - ApplyDiff { + #[command(name = "apply-patch", alias = "apply-diff")] + ApplyPatch { /// MBTiles file to apply diff to src_file: PathBuf, /// Diff file @@ -67,16 +63,30 @@ enum Commands { /// Value to specify the extent of the SQLite integrity check performed #[arg(long, value_enum, default_value_t=IntegrityCheckType::default())] integrity_check: IntegrityCheckType, - /// Generate a hash of the tile data hashes and store under the 'agg_tiles_hash' key in metadata + /// Update `agg_tiles_hash` metadata value instead of using it to validate if the entire tile store is valid. 
#[arg(long)] update_agg_tiles_hash: bool, }, } #[tokio::main] -async fn main() -> Result<()> { - let args = Args::parse(); +async fn main() { + let env = env_logger::Env::default().default_filter_or("info"); + env_logger::Builder::from_env(env) + .format_indent(None) + .format_module_path(false) + .format_target(false) + .format_timestamp(None) + .init(); + + if let Err(err) = main_int().await { + error!("{err}"); + std::process::exit(1); + } +} +async fn main_int() -> anyhow::Result<()> { + let args = Args::parse(); match args.command { Commands::MetaAll { file } => { meta_print_all(file.as_path()).await?; @@ -85,72 +95,51 @@ async fn main() -> Result<()> { meta_get_value(file.as_path(), &key).await?; } Commands::MetaSetValue { file, key, value } => { - meta_set_value(file.as_path(), &key, value).await?; + meta_set_value(file.as_path(), &key, value.as_deref()).await?; } Commands::Copy(opts) => { - copy_mbtiles_file(opts).await?; + opts.run().await?; } - Commands::ApplyDiff { + Commands::ApplyPatch { src_file, diff_file, } => { - apply_mbtiles_diff(src_file, diff_file).await?; + apply_patch(src_file, diff_file).await?; } Commands::Validate { file, integrity_check, update_agg_tiles_hash, } => { - validate_mbtiles(file.as_path(), integrity_check, update_agg_tiles_hash).await?; + let mbt = Mbtiles::new(file.as_path())?; + mbt.validate(integrity_check, update_agg_tiles_hash).await?; } } Ok(()) } -async fn meta_print_all(file: &Path) -> Result<()> { +async fn meta_print_all(file: &Path) -> anyhow::Result<()> { let mbt = Mbtiles::new(file)?; - let opt = SqliteConnectOptions::new().filename(file).read_only(true); - let mut conn = SqliteConnection::connect_with(&opt).await?; + let mut conn = mbt.open_readonly().await?; let metadata = mbt.get_metadata(&mut conn).await?; println!("{}", serde_yaml::to_string(&metadata)?); Ok(()) } -async fn meta_get_value(file: &Path, key: &str) -> Result<()> { +async fn meta_get_value(file: &Path, key: &str) -> MbtResult<()> { let mbt = 
Mbtiles::new(file)?; - let opt = SqliteConnectOptions::new().filename(file).read_only(true); - let mut conn = SqliteConnection::connect_with(&opt).await?; + let mut conn = mbt.open_readonly().await?; if let Some(s) = mbt.get_metadata_value(&mut conn, key).await? { println!("{s}"); } Ok(()) } -async fn meta_set_value(file: &Path, key: &str, value: Option) -> Result<()> { +async fn meta_set_value(file: &Path, key: &str, value: Option<&str>) -> MbtResult<()> { let mbt = Mbtiles::new(file)?; - let opt = SqliteConnectOptions::new().filename(file); - let mut conn = SqliteConnection::connect_with(&opt).await?; - mbt.set_metadata_value(&mut conn, key, value).await?; - Ok(()) -} - -async fn validate_mbtiles( - file: &Path, - check_type: IntegrityCheckType, - update_agg_tiles_hash: bool, -) -> Result<()> { - let mbt = Mbtiles::new(file)?; - let opt = SqliteConnectOptions::new().filename(file).read_only(true); - let mut conn = SqliteConnection::connect_with(&opt).await?; - mbt.check_integrity(&mut conn, check_type).await?; - mbt.check_each_tile_hash(&mut conn).await?; - if update_agg_tiles_hash { - mbt.update_agg_tiles_hash(&mut conn).await?; - } else { - mbt.check_agg_tile_hashes(&mut conn).await?; - } - Ok(()) + let mut conn = mbt.open().await?; + mbt.set_metadata_value(&mut conn, key, value).await } #[cfg(test)] @@ -159,9 +148,9 @@ mod tests { use clap::error::ErrorKind; use clap::Parser; - use martin_mbtiles::{CopyDuplicateMode, TileCopierOptions}; + use mbtiles::{CopyDuplicateMode, MbtilesCopier}; - use crate::Commands::{ApplyDiff, Copy, MetaGetValue, MetaSetValue, Validate}; + use crate::Commands::{ApplyPatch, Copy, MetaGetValue, MetaSetValue, Validate}; use crate::{Args, IntegrityCheckType}; #[test] @@ -180,7 +169,7 @@ mod tests { Args::parse_from(["mbtiles", "copy", "src_file", "dst_file"]), Args { verbose: false, - command: Copy(TileCopierOptions::new( + command: Copy(MbtilesCopier::new( PathBuf::from("src_file"), PathBuf::from("dst_file") )) @@ -190,24 +179,25 @@ 
mod tests { #[test] fn test_copy_min_max_zoom_arguments() { + let mut opt = MbtilesCopier::new(PathBuf::from("src_file"), PathBuf::from("dst_file")); + opt.min_zoom = Some(1); + opt.max_zoom = Some(100); + + let args = Args::parse_from([ + "mbtiles", + "copy", + "src_file", + "dst_file", + "--max-zoom", + "100", + "--min-zoom", + "1", + ]); assert_eq!( - Args::parse_from([ - "mbtiles", - "copy", - "src_file", - "dst_file", - "--max-zoom", - "100", - "--min-zoom", - "1" - ]), + args, Args { verbose: false, - command: Copy( - TileCopierOptions::new(PathBuf::from("src_file"), PathBuf::from("dst_file")) - .min_zoom(Some(1)) - .max_zoom(Some(100)) - ) + command: Copy(opt) } ); } @@ -252,6 +242,8 @@ mod tests { #[test] fn test_copy_zoom_levels_arguments() { + let mut opt = MbtilesCopier::new(PathBuf::from("src_file"), PathBuf::from("dst_file")); + opt.zoom_levels.extend(&[1, 3, 7]); assert_eq!( Args::parse_from([ "mbtiles", @@ -263,16 +255,15 @@ mod tests { ]), Args { verbose: false, - command: Copy( - TileCopierOptions::new(PathBuf::from("src_file"), PathBuf::from("dst_file")) - .zoom_levels(vec![1, 3, 7]) - ) + command: Copy(opt) } ); } #[test] fn test_copy_diff_with_file_arguments() { + let mut opt = MbtilesCopier::new(PathBuf::from("src_file"), PathBuf::from("dst_file")); + opt.diff_with_file = Some(PathBuf::from("no_file")); assert_eq!( Args::parse_from([ "mbtiles", @@ -284,16 +275,15 @@ mod tests { ]), Args { verbose: false, - command: Copy( - TileCopierOptions::new(PathBuf::from("src_file"), PathBuf::from("dst_file")) - .diff_with_file(PathBuf::from("no_file")) - ) + command: Copy(opt) } ); } #[test] fn test_copy_diff_with_override_copy_duplicate_mode() { + let mut opt = MbtilesCopier::new(PathBuf::from("src_file"), PathBuf::from("dst_file")); + opt.on_duplicate = CopyDuplicateMode::Override; assert_eq!( Args::parse_from([ "mbtiles", @@ -305,16 +295,15 @@ mod tests { ]), Args { verbose: false, - command: Copy( - TileCopierOptions::new(PathBuf::from("src_file"), 
PathBuf::from("dst_file")) - .on_duplicate(CopyDuplicateMode::Override) - ) + command: Copy(opt) } ); } #[test] fn test_copy_diff_with_ignore_copy_duplicate_mode() { + let mut opt = MbtilesCopier::new(PathBuf::from("src_file"), PathBuf::from("dst_file")); + opt.on_duplicate = CopyDuplicateMode::Ignore; assert_eq!( Args::parse_from([ "mbtiles", @@ -326,16 +315,15 @@ mod tests { ]), Args { verbose: false, - command: Copy( - TileCopierOptions::new(PathBuf::from("src_file"), PathBuf::from("dst_file")) - .on_duplicate(CopyDuplicateMode::Ignore) - ) + command: Copy(opt) } ); } #[test] fn test_copy_diff_with_abort_copy_duplicate_mode() { + let mut opt = MbtilesCopier::new(PathBuf::from("src_file"), PathBuf::from("dst_file")); + opt.on_duplicate = CopyDuplicateMode::Abort; assert_eq!( Args::parse_from([ "mbtiles", @@ -347,10 +335,7 @@ mod tests { ]), Args { verbose: false, - command: Copy( - TileCopierOptions::new(PathBuf::from("src_file"), PathBuf::from("dst_file")) - .on_duplicate(CopyDuplicateMode::Abort) - ) + command: Copy(opt) } ); } @@ -425,7 +410,7 @@ mod tests { Args::parse_from(["mbtiles", "apply-diff", "src_file", "diff_file"]), Args { verbose: false, - command: ApplyDiff { + command: ApplyPatch { src_file: PathBuf::from("src_file"), diff_file: PathBuf::from("diff_file"), } diff --git a/mbtiles/src/copier.rs b/mbtiles/src/copier.rs new file mode 100644 index 000000000..d8c165698 --- /dev/null +++ b/mbtiles/src/copier.rs @@ -0,0 +1,924 @@ +use std::collections::HashSet; +use std::path::PathBuf; + +#[cfg(feature = "cli")] +use clap::{builder::ValueParser, error::ErrorKind, Args, ValueEnum}; +use enum_display::EnumDisplay; +use log::{debug, info}; +use sqlite_hashes::rusqlite; +use sqlite_hashes::rusqlite::params_from_iter; +use sqlx::{query, Executor as _, Row, SqliteConnection}; + +use crate::errors::MbtResult; +use crate::mbtiles::MbtType::{Flat, FlatWithHash, Normalized}; +use crate::mbtiles::{MbtType, MbtTypeCli}; +use crate::queries::{ + create_flat_tables, 
create_flat_with_hash_tables, create_normalized_tables, + create_tiles_with_hash_view, detach_db, is_empty_database, +}; +use crate::{MbtError, Mbtiles, AGG_TILES_HASH, AGG_TILES_HASH_IN_DIFF}; + +#[derive(PartialEq, Eq, Default, Debug, Clone, EnumDisplay)] +#[enum_display(case = "Kebab")] +#[cfg_attr(feature = "cli", derive(ValueEnum))] +pub enum CopyDuplicateMode { + #[default] + Override, + Ignore, + Abort, +} + +#[derive(Clone, Default, PartialEq, Eq, Debug)] +#[cfg_attr(feature = "cli", derive(Args))] +pub struct MbtilesCopier { + /// MBTiles file to read from + pub src_file: PathBuf, + /// MBTiles file to write to + pub dst_file: PathBuf, + /// Output format of the destination file, ignored if the file exists. If not specified, defaults to the type of source + #[cfg_attr(feature = "cli", arg(long = "dst_type", value_enum))] + pub dst_type_cli: Option, + /// Destination type with options + #[cfg_attr(feature = "cli", arg(skip))] + pub dst_type: Option, + /// Specify copying behaviour when tiles with duplicate (zoom_level, tile_column, tile_row) values are found + #[cfg_attr(feature = "cli", arg(long, value_enum, default_value_t = CopyDuplicateMode::default()))] + pub on_duplicate: CopyDuplicateMode, + /// Minimum zoom level to copy + #[cfg_attr(feature = "cli", arg(long, conflicts_with("zoom_levels")))] + pub min_zoom: Option, + /// Maximum zoom level to copy + #[cfg_attr(feature = "cli", arg(long, conflicts_with("zoom_levels")))] + pub max_zoom: Option, + /// List of zoom levels to copy + #[cfg_attr(feature = "cli", arg(long, value_parser(ValueParser::new(HashSetValueParser{})), default_value=""))] + pub zoom_levels: HashSet, + /// Compare source file with this file, and only copy non-identical tiles to destination. + /// It should be later possible to run `mbtiles apply-diff SRC_FILE DST_FILE` to get the same DIFF file. 
+ #[cfg_attr(feature = "cli", arg(long, conflicts_with("apply_patch")))] + pub diff_with_file: Option, + /// Compare source file with this file, and only copy non-identical tiles to destination. + /// It should be later possible to run `mbtiles apply-diff SRC_FILE DST_FILE` to get the same DIFF file. + #[cfg_attr(feature = "cli", arg(long, conflicts_with("diff_with_file")))] + pub apply_patch: Option, + /// Skip generating a global hash for mbtiles validation. By default, `mbtiles` will compute `agg_tiles_hash` metadata value. + #[cfg_attr(feature = "cli", arg(long))] + pub skip_agg_tiles_hash: bool, +} + +#[cfg(feature = "cli")] +#[derive(Clone)] +struct HashSetValueParser; + +#[cfg(feature = "cli")] +impl clap::builder::TypedValueParser for HashSetValueParser { + type Value = HashSet; + + fn parse_ref( + &self, + _cmd: &clap::Command, + _arg: Option<&clap::Arg>, + value: &std::ffi::OsStr, + ) -> Result { + let mut result = HashSet::::new(); + let values = value + .to_str() + .ok_or(clap::Error::new(ErrorKind::ValueValidation))? 
+ .trim(); + if !values.is_empty() { + for val in values.split(',') { + result.insert( + val.trim() + .parse::() + .map_err(|_| clap::Error::new(ErrorKind::ValueValidation))?, + ); + } + } + Ok(result) + } +} + +#[derive(Clone, Debug)] +struct MbtileCopierInt { + src_mbtiles: Mbtiles, + dst_mbtiles: Mbtiles, + options: MbtilesCopier, +} + +impl MbtilesCopier { + #[must_use] + pub fn new(src_filepath: PathBuf, dst_filepath: PathBuf) -> Self { + Self { + src_file: src_filepath, + dst_file: dst_filepath, + zoom_levels: HashSet::new(), + dst_type_cli: None, + dst_type: None, + on_duplicate: CopyDuplicateMode::Override, + min_zoom: None, + max_zoom: None, + diff_with_file: None, + apply_patch: None, + skip_agg_tiles_hash: false, + } + } + + pub async fn run(self) -> MbtResult { + MbtileCopierInt::new(self)?.run().await + } + + pub(crate) fn dst_type(&self) -> Option { + self.dst_type.or_else(|| { + self.dst_type_cli.map(|t| match t { + MbtTypeCli::Flat => Flat, + MbtTypeCli::FlatWithHash => FlatWithHash, + MbtTypeCli::Normalized => Normalized { hash_view: true }, + }) + }) + } +} + +impl MbtileCopierInt { + pub fn new(options: MbtilesCopier) -> MbtResult { + if options.apply_patch.is_some() && options.diff_with_file.is_some() { + return Err(MbtError::CannotApplyPatchAndDiff); + } + // We may want to resolve the files to absolute paths here, but will need to avoid various non-file cases + if options.src_file == options.dst_file { + return Err(MbtError::SameSourceAndDestination(options.src_file)); + } + if let Some(diff_file) = &options.diff_with_file { + if options.src_file == *diff_file || options.dst_file == *diff_file { + return Err(MbtError::SameDiffAndSourceOrDestination(options.src_file)); + } + } + if let Some(patch_file) = &options.apply_patch { + if options.src_file == *patch_file || options.dst_file == *patch_file { + return Err(MbtError::SameDiffAndSourceOrDestination(options.src_file)); + } + } + + Ok(MbtileCopierInt { + src_mbtiles: 
Mbtiles::new(&options.src_file)?, + dst_mbtiles: Mbtiles::new(&options.dst_file)?, + options, + }) + } + + pub async fn run(self) -> MbtResult { + let dif = match (&self.options.diff_with_file, &self.options.apply_patch) { + (Some(dif_file), None) | (None, Some(dif_file)) => { + let dif_mbt = Mbtiles::new(dif_file)?; + let dif_type = dif_mbt.open_and_detect_type().await?; + Some((dif_mbt, dif_type, dif_type)) + } + (Some(_), Some(_)) => unreachable!(), // validated in the Self::new + _ => None, + }; + + // src and diff file connections are not needed later, as they will be attached to the dst file + let src_mbt = &self.src_mbtiles; + let dst_mbt = &self.dst_mbtiles; + + let src_type = src_mbt.open_and_detect_type().await?; + let mut conn = dst_mbt.open_or_new().await?; + let is_empty_db = is_empty_database(&mut conn).await?; + src_mbt.attach_to(&mut conn, "sourceDb").await?; + + let dst_type: MbtType; + if let Some((dif_mbt, dif_type, _)) = &dif { + if !is_empty_db { + return Err(MbtError::NonEmptyTargetFile(self.options.dst_file)); + } + dst_type = self.options.dst_type().unwrap_or(src_type); + dif_mbt.attach_to(&mut conn, "diffDb").await?; + let dif_path = dif_mbt.filepath(); + if self.options.diff_with_file.is_some() { + info!("Comparing {src_mbt} ({src_type}) and {dif_path} ({dif_type}) into a new file {dst_mbt} ({dst_type})"); + } else { + info!("Applying patch from {dif_path} ({dif_type}) to {src_mbt} ({src_type}) into a new file {dst_mbt} ({dst_type})"); + } + } else if is_empty_db { + dst_type = self.options.dst_type().unwrap_or(src_type); + info!("Copying {src_mbt} ({src_type}) to a new file {dst_mbt} ({dst_type})"); + } else { + dst_type = self.validate_dst_type(dst_mbt.detect_type(&mut conn).await?)?; + info!("Copying {src_mbt} ({src_type}) to an existing file {dst_mbt} ({dst_type})"); + } + + if is_empty_db { + self.init_new_schema(&mut conn, src_type, dst_type).await?; + } + + let select_from = if let Some((_, dif_type, _)) = &dif { + if 
self.options.diff_with_file.is_some() { + Self::get_select_from_with_diff(*dif_type, dst_type) + } else { + Self::get_select_from_apply_patch(src_type, *dif_type, dst_type) + } + } else { + Self::get_select_from(src_type, dst_type).to_string() + }; + + let (where_clause, query_args) = self.get_where_clause(); + let select_from = format!("{select_from} {where_clause}"); + let (on_dupl, sql_cond) = self.get_on_duplicate_sql(dst_type); + + debug!("Copying tiles with 'INSERT {on_dupl}' {src_type} -> {dst_type} ({sql_cond})"); + // Make sure not to execute any other queries while the handle is locked + let mut handle_lock = conn.lock_handle().await?; + let handle = handle_lock.as_raw_handle().as_ptr(); + + // SAFETY: this is safe as long as handle_lock is valid. We will drop the lock. + let rusqlite_conn = unsafe { rusqlite::Connection::from_handle(handle) }?; + + match dst_type { + Flat => { + let sql = format!( + " + INSERT {on_dupl} INTO tiles + (zoom_level, tile_column, tile_row, tile_data) + {select_from} {sql_cond}" + ); + debug!("Copying to {dst_type} with {sql} {query_args:?}"); + rusqlite_conn.execute(&sql, params_from_iter(query_args))? + } + FlatWithHash => { + let sql = format!( + " + INSERT {on_dupl} INTO tiles_with_hash + (zoom_level, tile_column, tile_row, tile_data, tile_hash) + {select_from} {sql_cond}" + ); + debug!("Copying to {dst_type} with {sql} {query_args:?}"); + rusqlite_conn.execute(&sql, params_from_iter(query_args))? + } + Normalized { .. 
} => { + let sql = format!( + " + INSERT OR IGNORE INTO images + (tile_id, tile_data) + SELECT tile_hash as tile_id, tile_data + FROM ({select_from})" + ); + debug!("Copying to {dst_type} with {sql} {query_args:?}"); + rusqlite_conn.execute(&sql, params_from_iter(&query_args))?; + + let sql = format!( + " + INSERT {on_dupl} INTO map + (zoom_level, tile_column, tile_row, tile_id) + SELECT zoom_level, tile_column, tile_row, tile_hash as tile_id + FROM ({select_from} {sql_cond})" + ); + debug!("Copying to {dst_type} with {sql} {query_args:?}"); + rusqlite_conn.execute(&sql, params_from_iter(query_args))? + } + }; + + let sql; + if dif.is_some() { + // Insert all rows from diffDb.metadata if they do not exist or are different in sourceDb.metadata. + // Also insert all names from sourceDb.metadata that do not exist in diffDb.metadata, with their value set to NULL. + // Rename agg_tiles_hash to agg_tiles_hash_in_diff because agg_tiles_hash will be auto-added later + if self.options.diff_with_file.is_some() { + sql = format!( + " + INSERT {on_dupl} INTO metadata (name, value) + SELECT IIF(name = '{AGG_TILES_HASH}','{AGG_TILES_HASH_IN_DIFF}', name) as name + , value + FROM ( + SELECT COALESCE(difMD.name, srcMD.name) as name + , difMD.value as value + FROM sourceDb.metadata AS srcMD FULL JOIN diffDb.metadata AS difMD + ON srcMD.name = difMD.name + WHERE srcMD.value != difMD.value OR srcMD.value ISNULL OR difMD.value ISNULL + ) joinedMD + WHERE name != '{AGG_TILES_HASH_IN_DIFF}'" + ); + } else { + sql = format!( + " + INSERT {on_dupl} INTO metadata (name, value) + SELECT IIF(name = '{AGG_TILES_HASH_IN_DIFF}','{AGG_TILES_HASH}', name) as name + , value + FROM ( + SELECT COALESCE(srcMD.name, difMD.name) as name + , COALESCE(difMD.value, srcMD.value) as value + FROM sourceDb.metadata AS srcMD FULL JOIN diffDb.metadata AS difMD + ON srcMD.name = difMD.name + WHERE difMD.name ISNULL OR difMD.value NOTNULL + ) joinedMD + WHERE name != '{AGG_TILES_HASH}'" + ); + } + if 
self.options.diff_with_file.is_some() { + debug!("Copying metadata, taking into account diff file with {sql}"); + } else { + debug!("Copying metadata, and applying the diff file with {sql}"); + } + } else { + sql = format!( + " + INSERT {on_dupl} INTO metadata SELECT name, value FROM sourceDb.metadata" + ); + debug!("Copying metadata with {sql}"); + } + rusqlite_conn.execute(&sql, [])?; + + // SAFETY: must drop rusqlite_conn before handle_lock, or place the code since lock in a separate scope + drop(rusqlite_conn); + drop(handle_lock); + + if !self.options.skip_agg_tiles_hash { + dst_mbt.update_agg_tiles_hash(&mut conn).await?; + } + + detach_db(&mut conn, "sourceDb").await?; + // Ignore error because we might not have attached diffDb + let _ = detach_db(&mut conn, "diffDb").await; + + Ok(conn) + } + + /// Check if the detected destination file type matches the one given by the options + fn validate_dst_type(&self, dst_type: MbtType) -> MbtResult { + if let Some(cli) = self.options.dst_type() { + match (cli, dst_type) { + (Flat, Flat) + | (FlatWithHash, FlatWithHash) + | (Normalized { .. }, Normalized { .. }) => {} + (cli, dst) => { + return Err(MbtError::MismatchedTargetType( + self.options.dst_file.to_path_buf(), + dst, + cli, + )) + } + } + } + Ok(dst_type) + } + + async fn init_new_schema( + &self, + conn: &mut SqliteConnection, + src: MbtType, + dst: MbtType, + ) -> MbtResult<()> { + debug!("Resetting PRAGMA settings and vacuuming"); + query!("PRAGMA page_size = 512").execute(&mut *conn).await?; + query!("PRAGMA encoding = 'UTF-8'") + .execute(&mut *conn) + .await?; + query!("VACUUM").execute(&mut *conn).await?; + + if src == dst { + // DB objects must be created in a specific order: tables, views, triggers, indexes. 
+ debug!("Copying DB schema verbatim"); + let sql_objects = conn + .fetch_all( + "SELECT sql + FROM sourceDb.sqlite_schema + WHERE tbl_name IN ('metadata', 'tiles', 'map', 'images', 'tiles_with_hash') + AND type IN ('table', 'view', 'trigger', 'index') + ORDER BY CASE + WHEN type = 'table' THEN 1 + WHEN type = 'view' THEN 2 + WHEN type = 'trigger' THEN 3 + WHEN type = 'index' THEN 4 + ELSE 5 END;", + ) + .await?; + + for row in sql_objects { + query(row.get(0)).execute(&mut *conn).await?; + } + } else { + match dst { + Flat => create_flat_tables(&mut *conn).await?, + FlatWithHash => create_flat_with_hash_tables(&mut *conn).await?, + Normalized { .. } => create_normalized_tables(&mut *conn).await?, + }; + }; + + if dst.is_normalized() { + // Some normalized mbtiles files might not have this view, so even if src == dst, it might not exist + create_tiles_with_hash_view(&mut *conn).await?; + } + + Ok(()) + } + + /// Returns (ON DUPLICATE SQL, WHERE condition SQL) + fn get_on_duplicate_sql(&self, dst_type: MbtType) -> (String, String) { + match &self.options.on_duplicate { + CopyDuplicateMode::Override => ("OR REPLACE".to_string(), String::new()), + CopyDuplicateMode::Ignore => ("OR IGNORE".to_string(), String::new()), + CopyDuplicateMode::Abort => ("OR ABORT".to_string(), { + let (main_table, tile_identifier) = match dst_type { + Flat => ("tiles", "tile_data"), + FlatWithHash => ("tiles_with_hash", "tile_data"), + Normalized { .. 
} => ("map", "tile_id"), + }; + + format!( + "AND NOT EXISTS ( + SELECT 1 + FROM {main_table} + WHERE + {main_table}.zoom_level = sourceDb.{main_table}.zoom_level + AND {main_table}.tile_column = sourceDb.{main_table}.tile_column + AND {main_table}.tile_row = sourceDb.{main_table}.tile_row + AND {main_table}.{tile_identifier} != sourceDb.{main_table}.{tile_identifier} + )" + ) + }), + } + } + + fn get_select_from_apply_patch( + src_type: MbtType, + dif_type: MbtType, + dst_type: MbtType, + ) -> String { + fn query_for_dst(frm_db: &'static str, frm_type: MbtType, to_type: MbtType) -> String { + match to_type { + Flat => format!("{frm_db}.tiles"), + FlatWithHash => match frm_type { + Flat => format!( + " + (SELECT zoom_level, tile_column, tile_row, tile_data, md5_hex(tile_data) AS tile_hash + FROM {frm_db}.tiles)" + ), + FlatWithHash => format!("{frm_db}.tiles_with_hash"), + Normalized { hash_view } => { + if hash_view { + format!("{frm_db}.tiles_with_hash") + } else { + format!( + " + (SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS tile_hash + FROM {frm_db}.map JOIN {frm_db}.images ON map.tile_id = images.tile_id)" + ) + } + } + }, + Normalized { .. } => match frm_type { + Flat => format!( + " + (SELECT zoom_level, tile_column, tile_row, tile_data, md5_hex(tile_data) AS tile_hash + FROM {frm_db}.tiles)" + ), + FlatWithHash => format!("{frm_db}.tiles_with_hash"), + Normalized { hash_view } => { + if hash_view { + format!("{frm_db}.tiles_with_hash") + } else { + format!( + " + (SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS tile_hash + FROM {frm_db}.map JOIN {frm_db}.images ON map.tile_id = images.tile_id)" + ) + } + } + }, + } + } + + let tile_hash_expr = if dst_type == Flat { + String::new() + } else { + fn get_tile_hash_expr(tbl: &str, typ: MbtType) -> String { + match typ { + Flat => format!("IIF({tbl}.tile_data ISNULL, NULL, md5_hex({tbl}.tile_data))"), + FlatWithHash => format!("{tbl}.tile_hash"), + Normalized { .. 
} => format!("{tbl}.tile_hash"), + } + } + + format!( + ", COALESCE({}, {}) as tile_hash", + get_tile_hash_expr("difTiles", dif_type), + get_tile_hash_expr("srcTiles", src_type) + ) + }; + + let src_tiles = query_for_dst("sourceDb", src_type, dst_type); + let diff_tiles = query_for_dst("diffDb", dif_type, dst_type); + + // Take dif tile_data if it is set, otherwise take the one from src + // Skip tiles if src and dif both have a matching index, but the dif tile_data is NULL + format!( + " + SELECT COALESCE(srcTiles.zoom_level, difTiles.zoom_level) as zoom_level + , COALESCE(srcTiles.tile_column, difTiles.tile_column) as tile_column + , COALESCE(srcTiles.tile_row, difTiles.tile_row) as tile_row + , COALESCE(difTiles.tile_data, srcTiles.tile_data) as tile_data + {tile_hash_expr} + FROM {src_tiles} AS srcTiles FULL JOIN {diff_tiles} AS difTiles + ON srcTiles.zoom_level = difTiles.zoom_level + AND srcTiles.tile_column = difTiles.tile_column + AND srcTiles.tile_row = difTiles.tile_row + WHERE (difTiles.zoom_level ISNULL OR difTiles.tile_data NOTNULL)" + ) + } + + fn get_select_from_with_diff(dif_type: MbtType, dst_type: MbtType) -> String { + let tile_hash_expr; + let diff_tiles; + if dst_type == Flat { + tile_hash_expr = ""; + diff_tiles = "diffDb.tiles"; + } else { + tile_hash_expr = match dif_type { + Flat => ", COALESCE(md5_hex(difTiles.tile_data), '') as tile_hash", + FlatWithHash => ", COALESCE(difTiles.tile_hash, '') as tile_hash", + Normalized { .. } => ", COALESCE(difTiles.tile_hash, '') as tile_hash", + }; + diff_tiles = match dif_type { + Flat => "diffDb.tiles", + FlatWithHash => "diffDb.tiles_with_hash", + Normalized { .. 
} => { + " + (SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS tile_hash + FROM diffDb.map JOIN diffDb.images ON diffDb.map.tile_id = diffDb.images.tile_id)" + } + }; + } + + format!( + " + SELECT COALESCE(srcTiles.zoom_level, difTiles.zoom_level) as zoom_level + , COALESCE(srcTiles.tile_column, difTiles.tile_column) as tile_column + , COALESCE(srcTiles.tile_row, difTiles.tile_row) as tile_row + , difTiles.tile_data as tile_data + {tile_hash_expr} + FROM sourceDb.tiles AS srcTiles FULL JOIN {diff_tiles} AS difTiles + ON srcTiles.zoom_level = difTiles.zoom_level + AND srcTiles.tile_column = difTiles.tile_column + AND srcTiles.tile_row = difTiles.tile_row + WHERE (srcTiles.tile_data != difTiles.tile_data + OR srcTiles.tile_data ISNULL + OR difTiles.tile_data ISNULL)" + ) + } + + fn get_select_from(src_type: MbtType, dst_type: MbtType) -> &'static str { + if dst_type == Flat { + "SELECT zoom_level, tile_column, tile_row, tile_data FROM sourceDb.tiles WHERE TRUE" + } else { + match src_type { + Flat => { + " + SELECT zoom_level, tile_column, tile_row, tile_data, md5_hex(tile_data) as tile_hash + FROM sourceDb.tiles + WHERE TRUE" + } + FlatWithHash => { + " + SELECT zoom_level, tile_column, tile_row, tile_data, tile_hash + FROM sourceDb.tiles_with_hash + WHERE TRUE" + } + Normalized { .. 
} => { + " + SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS tile_hash + FROM sourceDb.map JOIN sourceDb.images + ON sourceDb.map.tile_id = sourceDb.images.tile_id + WHERE TRUE" + } + } + } + } + + fn get_where_clause(&self) -> (String, Vec) { + let mut query_args = vec![]; + + let sql = if !&self.options.zoom_levels.is_empty() { + for z in &self.options.zoom_levels { + query_args.push(*z); + } + format!( + " AND zoom_level IN ({})", + vec!["?"; self.options.zoom_levels.len()].join(",") + ) + } else if let Some(min_zoom) = self.options.min_zoom { + if let Some(max_zoom) = self.options.max_zoom { + query_args.push(min_zoom); + query_args.push(max_zoom); + " AND zoom_level BETWEEN ? AND ?".to_string() + } else { + query_args.push(min_zoom); + " AND zoom_level >= ?".to_string() + } + } else if let Some(max_zoom) = self.options.max_zoom { + query_args.push(max_zoom); + " AND zoom_level <= ?".to_string() + } else { + String::new() + }; + + (sql, query_args) + } +} + +#[cfg(test)] +mod tests { + use sqlx::{Decode, Sqlite, SqliteConnection, Type}; + + use super::*; + + const FLAT: Option = Some(MbtTypeCli::Flat); + const FLAT_WITH_HASH: Option = Some(MbtTypeCli::FlatWithHash); + const NORM_CLI: Option = Some(MbtTypeCli::Normalized); + const NORM_WITH_VIEW: MbtType = Normalized { hash_view: true }; + + async fn get_one(conn: &mut SqliteConnection, sql: &str) -> T + where + for<'r> T: Decode<'r, Sqlite> + Type, + { + query(sql).fetch_one(conn).await.unwrap().get::(0) + } + + async fn verify_copy_all( + src_filepath: PathBuf, + dst_filepath: PathBuf, + dst_type_cli: Option, + expected_dst_type: MbtType, + ) -> MbtResult<()> { + let mut opt = MbtilesCopier::new(src_filepath.clone(), dst_filepath.clone()); + opt.dst_type_cli = dst_type_cli; + let mut dst_conn = opt.run().await?; + + Mbtiles::new(src_filepath)? + .attach_to(&mut dst_conn, "testSrcDb") + .await?; + + assert_eq!( + Mbtiles::new(dst_filepath)? 
+ .detect_type(&mut dst_conn) + .await?, + expected_dst_type + ); + + assert!(dst_conn + .fetch_optional("SELECT * FROM testSrcDb.tiles EXCEPT SELECT * FROM tiles") + .await? + .is_none()); + + Ok(()) + } + + async fn verify_copy_with_zoom_filter( + opt: MbtilesCopier, + expected_zoom_levels: u8, + ) -> MbtResult<()> { + let mut dst_conn = opt.run().await?; + + assert_eq!( + get_one::( + &mut dst_conn, + "SELECT COUNT(DISTINCT zoom_level) FROM tiles;" + ) + .await, + expected_zoom_levels + ); + + Ok(()) + } + + #[actix_rt::test] + async fn copy_flat_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let dst = PathBuf::from("file:copy_flat_tables_mem_db?mode=memory&cache=shared"); + verify_copy_all(src, dst, None, Flat).await + } + + #[actix_rt::test] + async fn copy_flat_from_flat_with_hash_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/zoomed_world_cities.mbtiles"); + let dst = PathBuf::from( + "file:copy_flat_from_flat_with_hash_tables_mem_db?mode=memory&cache=shared", + ); + verify_copy_all(src, dst, FLAT, Flat).await + } + + #[actix_rt::test] + async fn copy_flat_from_normalized_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/geography-class-png.mbtiles"); + let dst = + PathBuf::from("file:copy_flat_from_normalized_tables_mem_db?mode=memory&cache=shared"); + verify_copy_all(src, dst, FLAT, Flat).await + } + + #[actix_rt::test] + async fn copy_flat_with_hash_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/zoomed_world_cities.mbtiles"); + let dst = PathBuf::from("file:copy_flat_with_hash_tables_mem_db?mode=memory&cache=shared"); + verify_copy_all(src, dst, None, FlatWithHash).await + } + + #[actix_rt::test] + async fn copy_flat_with_hash_from_flat_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let dst = PathBuf::from( + 
"file:copy_flat_with_hash_from_flat_tables_mem_db?mode=memory&cache=shared", + ); + verify_copy_all(src, dst, FLAT_WITH_HASH, FlatWithHash).await + } + + #[actix_rt::test] + async fn copy_flat_with_hash_from_normalized_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/geography-class-png.mbtiles"); + let dst = PathBuf::from( + "file:copy_flat_with_hash_from_normalized_tables_mem_db?mode=memory&cache=shared", + ); + verify_copy_all(src, dst, FLAT_WITH_HASH, FlatWithHash).await + } + + #[actix_rt::test] + async fn copy_normalized_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/geography-class-png.mbtiles"); + let dst = PathBuf::from("file:copy_normalized_tables_mem_db?mode=memory&cache=shared"); + verify_copy_all(src, dst, None, NORM_WITH_VIEW).await + } + + #[actix_rt::test] + async fn copy_normalized_from_flat_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let dst = + PathBuf::from("file:copy_normalized_from_flat_tables_mem_db?mode=memory&cache=shared"); + verify_copy_all(src, dst, NORM_CLI, NORM_WITH_VIEW).await + } + + #[actix_rt::test] + async fn copy_normalized_from_flat_with_hash_tables() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/zoomed_world_cities.mbtiles"); + let dst = PathBuf::from( + "file:copy_normalized_from_flat_with_hash_tables_mem_db?mode=memory&cache=shared", + ); + verify_copy_all(src, dst, NORM_CLI, NORM_WITH_VIEW).await + } + + #[actix_rt::test] + async fn copy_with_min_max_zoom() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let dst = PathBuf::from("file:copy_with_min_max_zoom_mem_db?mode=memory&cache=shared"); + let mut opt = MbtilesCopier::new(src, dst); + opt.min_zoom = Some(2); + opt.max_zoom = Some(4); + verify_copy_with_zoom_filter(opt, 3).await + } + + #[actix_rt::test] + async fn copy_with_zoom_levels() -> MbtResult<()> { + let src = 
PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let dst = PathBuf::from("file:copy_with_zoom_levels_mem_db?mode=memory&cache=shared"); + let mut opt = MbtilesCopier::new(src, dst); + opt.min_zoom = Some(2); + opt.max_zoom = Some(4); + opt.zoom_levels.extend(&[1, 6]); + verify_copy_with_zoom_filter(opt, 2).await + } + + #[actix_rt::test] + async fn copy_with_diff_with_file() -> MbtResult<()> { + let src = PathBuf::from("../tests/fixtures/mbtiles/geography-class-jpg.mbtiles"); + let dst = PathBuf::from("file:copy_with_diff_with_file_mem_db?mode=memory&cache=shared"); + + let diff_file = + PathBuf::from("../tests/fixtures/mbtiles/geography-class-jpg-modified.mbtiles"); + + let mut opt = MbtilesCopier::new(src.clone(), dst.clone()); + opt.diff_with_file = Some(diff_file.clone()); + let mut dst_conn = opt.run().await?; + + assert!(dst_conn + .fetch_optional("SELECT 1 FROM sqlite_schema WHERE name = 'tiles';") + .await? + .is_some()); + + assert_eq!( + get_one::(&mut dst_conn, "SELECT COUNT(*) FROM map;").await, + 3 + ); + + assert!(get_one::>( + &mut dst_conn, + "SELECT * FROM tiles WHERE zoom_level = 2 AND tile_row = 2 AND tile_column = 2;" + ) + .await + .is_some()); + + assert!(get_one::>( + &mut dst_conn, + "SELECT * FROM tiles WHERE zoom_level = 1 AND tile_row = 1 AND tile_column = 1;" + ) + .await + .is_some()); + + assert!(get_one::>( + &mut dst_conn, + "SELECT * FROM map WHERE zoom_level = 0 AND tile_row = 0 AND tile_column = 0;", + ) + .await + .is_some()); + + Ok(()) + } + + #[actix_rt::test] + async fn copy_to_existing_abort_mode() { + let src = PathBuf::from("../tests/fixtures/mbtiles/world_cities_modified.mbtiles"); + let dst = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + + let mut opt = MbtilesCopier::new(src.clone(), dst.clone()); + opt.on_duplicate = CopyDuplicateMode::Abort; + + assert!(matches!( + opt.run().await.unwrap_err(), + MbtError::RusqliteError(..) 
+ )); + } + + #[actix_rt::test] + async fn copy_to_existing_override_mode() -> MbtResult<()> { + let src_file = PathBuf::from("../tests/fixtures/mbtiles/world_cities_modified.mbtiles"); + + // Copy the dst file to an in-memory DB + let dst_file = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let dst = + PathBuf::from("file:copy_to_existing_override_mode_mem_db?mode=memory&cache=shared"); + + let _dst_conn = MbtilesCopier::new(dst_file.clone(), dst.clone()) + .run() + .await?; + + let mut dst_conn = MbtilesCopier::new(src_file.clone(), dst.clone()) + .run() + .await?; + + // Verify the tiles in the destination file is a superset of the tiles in the source file + Mbtiles::new(src_file)? + .attach_to(&mut dst_conn, "testOtherDb") + .await?; + assert!(dst_conn + .fetch_optional("SELECT * FROM testOtherDb.tiles EXCEPT SELECT * FROM tiles;") + .await? + .is_none()); + + Ok(()) + } + + #[actix_rt::test] + async fn copy_to_existing_ignore_mode() -> MbtResult<()> { + let src_file = PathBuf::from("../tests/fixtures/mbtiles/world_cities_modified.mbtiles"); + + // Copy the dst file to an in-memory DB + let dst_file = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let dst = + PathBuf::from("file:copy_to_existing_ignore_mode_mem_db?mode=memory&cache=shared"); + + let _dst_conn = MbtilesCopier::new(dst_file.clone(), dst.clone()) + .run() + .await?; + + let mut opt = MbtilesCopier::new(src_file.clone(), dst.clone()); + opt.on_duplicate = CopyDuplicateMode::Ignore; + let mut dst_conn = opt.run().await?; + + // Verify the tiles in the destination file are the same as those in the source file except for those with duplicate (zoom_level, tile_column, tile_row) + Mbtiles::new(src_file)? + .attach_to(&mut dst_conn, "testSrcDb") + .await?; + Mbtiles::new(dst_file)? 
+ .attach_to(&mut dst_conn, "testOriginalDb") + .await?; + + // Create a temporary table with all the tiles in the original database and + // all the tiles in the source database except for those that conflict with tiles in the original database + dst_conn.execute( + "CREATE TEMP TABLE expected_tiles AS + SELECT COALESCE(t1.zoom_level, t2.zoom_level) as zoom_level, + COALESCE(t1.tile_column, t2.tile_column) as tile_column, + COALESCE(t1.tile_row, t2.tile_row) as tile_row, + COALESCE(t1.tile_data, t2.tile_data) as tile_data + FROM testOriginalDb.tiles as t1 + FULL OUTER JOIN testSrcDb.tiles as t2 + ON t1.zoom_level = t2.zoom_level AND t1.tile_column = t2.tile_column AND t1.tile_row = t2.tile_row") + .await?; + + // Ensure all entries in expected_tiles are in tiles and vice versa + assert!(query( + "SELECT * FROM expected_tiles EXCEPT SELECT * FROM tiles + UNION + SELECT * FROM tiles EXCEPT SELECT * FROM expected_tiles" + ) + .fetch_optional(&mut dst_conn) + .await? + .is_none()); + + Ok(()) + } +} diff --git a/martin-mbtiles/src/errors.rs b/mbtiles/src/errors.rs similarity index 71% rename from martin-mbtiles/src/errors.rs rename to mbtiles/src/errors.rs index 43d200037..cb732b66e 100644 --- a/martin-mbtiles/src/errors.rs +++ b/mbtiles/src/errors.rs @@ -3,8 +3,16 @@ use std::path::PathBuf; use martin_tile_utils::TileInfo; use sqlite_hashes::rusqlite; +use crate::MbtType; + #[derive(thiserror::Error, Debug)] pub enum MbtError { + #[error("The source and destination MBTiles files are the same: {}", .0.display())] + SameSourceAndDestination(PathBuf), + + #[error("The diff file and source or destination MBTiles files are the same: {}", .0.display())] + SameDiffAndSourceOrDestination(PathBuf), + #[error("SQL Error {0}")] SqlxError(#[from] sqlx::Error), @@ -38,7 +46,7 @@ pub enum MbtError { #[error("No tiles found")] NoTilesFound, - #[error("The destination file {0} is non-empty")] + #[error("The destination file {0} is not empty. 
Some operations like creating a diff file require the destination file to be non-existent or empty.")] NonEmptyTargetFile(PathBuf), #[error("The file {0} does not have the required uniqueness constraint")] @@ -49,6 +57,12 @@ pub enum MbtError { #[error("Unexpected duplicate tiles found when copying")] DuplicateValues, + + #[error("Applying a patch while diffing is not supported")] + CannotApplyPatchAndDiff, + + #[error("The MBTiles file {0} has data of type {1}, but the desired type was set to {2}")] + MismatchedTargetType(PathBuf, MbtType, MbtType), } pub type MbtResult = Result; diff --git a/mbtiles/src/lib.rs b/mbtiles/src/lib.rs new file mode 100644 index 000000000..0c17da7b1 --- /dev/null +++ b/mbtiles/src/lib.rs @@ -0,0 +1,26 @@ +#![doc = include_str!("../README.md")] +#![allow(clippy::missing_errors_doc)] + +mod errors; +pub use errors::{MbtError, MbtResult}; + +mod mbtiles; +pub use mbtiles::{ + calc_agg_tiles_hash, IntegrityCheckType, MbtType, MbtTypeCli, Mbtiles, Metadata, + AGG_TILES_HASH, AGG_TILES_HASH_IN_DIFF, +}; + +mod pool; +pub use pool::MbtilesPool; + +mod copier; +pub use copier::{CopyDuplicateMode, MbtilesCopier}; + +mod patcher; +pub use patcher::apply_patch; + +mod queries; +pub use queries::{ + create_flat_tables, create_flat_with_hash_tables, create_metadata_table, + create_normalized_tables, is_flat_with_hash_tables_type, is_normalized_tables_type, +}; diff --git a/martin-mbtiles/src/mbtiles.rs b/mbtiles/src/mbtiles.rs similarity index 65% rename from martin-mbtiles/src/mbtiles.rs rename to mbtiles/src/mbtiles.rs index 4912e4e72..2faf2b894 100644 --- a/martin-mbtiles/src/mbtiles.rs +++ b/mbtiles/src/mbtiles.rs @@ -8,6 +8,7 @@ use std::str::FromStr; #[cfg(feature = "cli")] use clap::ValueEnum; +use enum_display::EnumDisplay; use futures::TryStreamExt; use log::{debug, info, warn}; use martin_tile_utils::{Format, TileInfo}; @@ -15,16 +16,14 @@ use serde::ser::SerializeStruct; use serde::Serialize; use serde_json::{Value as JSONValue, Value}; 
use sqlite_hashes::register_md5_function; -use sqlite_hashes::rusqlite::{ - Connection as RusqliteConnection, Connection, OpenFlags, OptionalExtension, -}; -use sqlx::sqlite::SqliteRow; -use sqlx::{query, Row, SqliteExecutor}; +use sqlx::sqlite::{SqliteConnectOptions, SqliteRow}; +use sqlx::{query, Connection as _, Row, SqliteConnection, SqliteExecutor}; use tilejson::{tilejson, Bounds, Center, TileJSON}; use crate::errors::{MbtError, MbtResult}; -use crate::mbtiles_queries::{ - is_flat_tables_type, is_flat_with_hash_tables_type, is_normalized_tables_type, +use crate::queries::{ + has_tiles_with_hash, is_flat_tables_type, is_flat_with_hash_tables_type, + is_normalized_tables_type, }; use crate::MbtError::{ AggHashMismatch, AggHashValueNotFound, FailedIntegrityCheck, IncorrectTileHash, @@ -57,15 +56,42 @@ where s.end() } -#[derive(Debug, Clone, Copy, PartialEq, Eq)] +/// Metadata key for the aggregate tiles hash value +pub const AGG_TILES_HASH: &str = "agg_tiles_hash"; + +/// Metadata key for a diff file, +/// describing the eventual AGG_TILES_HASH value once the diff is applied +pub const AGG_TILES_HASH_IN_DIFF: &str = "agg_tiles_hash_after_apply"; + +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, EnumDisplay)] +#[enum_display(case = "Kebab")] #[cfg_attr(feature = "cli", derive(ValueEnum))] -pub enum MbtType { +pub enum MbtTypeCli { Flat, FlatWithHash, Normalized, } -#[derive(PartialEq, Eq, Default, Debug, Clone)] +#[derive(Debug, Clone, Copy, Hash, PartialEq, Eq, EnumDisplay)] +#[enum_display(case = "Kebab")] +pub enum MbtType { + Flat, + FlatWithHash, + Normalized { hash_view: bool }, +} + +impl MbtType { + pub fn is_normalized(&self) -> bool { + matches!(self, Self::Normalized { .. 
}) + } + + pub fn is_normalized_with_view(&self) -> bool { + matches!(self, Self::Normalized { hash_view: true }) + } +} + +#[derive(PartialEq, Eq, Default, Debug, Clone, EnumDisplay)] +#[enum_display(case = "Kebab")] #[cfg_attr(feature = "cli", derive(ValueEnum))] pub enum IntegrityCheckType { #[default] @@ -80,6 +106,12 @@ pub struct Mbtiles { filename: String, } +impl Display for Mbtiles { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.filepath) + } +} + impl Mbtiles { pub fn new>(filepath: P) -> MbtResult { let path = filepath.as_ref(); @@ -96,6 +128,34 @@ impl Mbtiles { }) } + pub async fn open(&self) -> MbtResult { + debug!("Opening w/ defaults {self}"); + let opt = SqliteConnectOptions::new().filename(self.filepath()); + Self::open_int(&opt).await + } + + pub async fn open_or_new(&self) -> MbtResult { + debug!("Opening or creating {self}"); + let opt = SqliteConnectOptions::new() + .filename(self.filepath()) + .create_if_missing(true); + Self::open_int(&opt).await + } + + pub async fn open_readonly(&self) -> MbtResult { + debug!("Opening as readonly {self}"); + let opt = SqliteConnectOptions::new() + .filename(self.filepath()) + .read_only(true); + Self::open_int(&opt).await + } + + async fn open_int(opt: &SqliteConnectOptions) -> Result { + let mut conn = SqliteConnection::connect_with(opt).await?; + attach_hash_fn(&mut conn).await?; + Ok(conn) + } + #[must_use] pub fn filepath(&self) -> &str { &self.filepath @@ -117,6 +177,20 @@ impl Mbtiles { } } + /// Attach this MBTiles file to the given SQLite connection as a given name + pub async fn attach_to(&self, conn: &mut T, name: &str) -> MbtResult<()> + where + for<'e> &'e mut T: SqliteExecutor<'e>, + { + debug!("Attaching {self} as {name}"); + query(&format!("ATTACH DATABASE ? 
AS {name}")) + .bind(self.filepath()) + .execute(conn) + .await?; + Ok(()) + } + + /// Get a single metadata value from the metadata table pub async fn get_metadata_value(&self, conn: &mut T, key: &str) -> MbtResult> where for<'e> &'e mut T: SqliteExecutor<'e>, @@ -131,19 +205,38 @@ impl Mbtiles { Ok(None) } + pub async fn validate( + &self, + check_type: IntegrityCheckType, + update_agg_tiles_hash: bool, + ) -> MbtResult { + let mut conn = if update_agg_tiles_hash { + self.open().await? + } else { + self.open_readonly().await? + }; + self.check_integrity(&mut conn, check_type).await?; + self.check_each_tile_hash(&mut conn).await?; + if update_agg_tiles_hash { + self.update_agg_tiles_hash(&mut conn).await + } else { + self.check_agg_tiles_hashes(&mut conn).await + } + } + /// Get the aggregate tiles hash value from the metadata table pub async fn get_agg_tiles_hash(&self, conn: &mut T) -> MbtResult> where for<'e> &'e mut T: SqliteExecutor<'e>, { - self.get_metadata_value(&mut *conn, "agg_tiles_hash").await + self.get_metadata_value(&mut *conn, AGG_TILES_HASH).await } pub async fn set_metadata_value( &self, conn: &mut T, key: &str, - value: Option, + value: Option<&str>, ) -> MbtResult<()> where for<'e> &'e mut T: SqliteExecutor<'e>, @@ -353,12 +446,20 @@ impl Mbtiles { Ok(None) } + pub async fn open_and_detect_type(&self) -> MbtResult { + let mut conn = self.open_readonly().await?; + self.detect_type(&mut conn).await + } + pub async fn detect_type(&self, conn: &mut T) -> MbtResult where for<'e> &'e mut T: SqliteExecutor<'e>, { - let mbt_type = if is_normalized_tables_type(&mut *conn).await? { - MbtType::Normalized + debug!("Detecting MBTiles type for {self}"); + let typ = if is_normalized_tables_type(&mut *conn).await? { + MbtType::Normalized { + hash_view: has_tiles_with_hash(&mut *conn).await?, + } } else if is_flat_with_hash_tables_type(&mut *conn).await? { MbtType::FlatWithHash } else if is_flat_tables_type(&mut *conn).await? 
{ @@ -367,10 +468,10 @@ impl Mbtiles { return Err(MbtError::InvalidDataFormat(self.filepath.clone())); }; - self.check_for_uniqueness_constraint(&mut *conn, mbt_type) + self.check_for_uniqueness_constraint(&mut *conn, typ) .await?; - Ok(mbt_type) + Ok(typ) } async fn check_for_uniqueness_constraint( @@ -384,7 +485,7 @@ impl Mbtiles { let table_name = match mbt_type { MbtType::Flat => "tiles", MbtType::FlatWithHash => "tiles_with_hash", - MbtType::Normalized => "map", + MbtType::Normalized { .. } => "map", }; let indexes = query("SELECT name FROM pragma_index_list(?) WHERE [unique] = 1") @@ -420,41 +521,6 @@ impl Mbtiles { Err(MbtError::NoUniquenessConstraint(self.filepath.clone())) } - /// Compute the hash of the combined tiles in the mbtiles file tiles table/view. - /// This should work on all mbtiles files perf `MBTiles` specification. - fn calc_agg_tiles_hash(&self) -> MbtResult { - Ok(self.open_with_hashes(true)?.query_row_and_then( - // The md5_concat func will return NULL if there are no rows in the tiles table. - // For our use case, we will treat it as an empty string, and hash that. - "SELECT hex( - coalesce( - md5_concat( - cast(zoom_level AS text), - cast(tile_column AS text), - cast(tile_row AS text), - tile_data - ), - md5('') - ) - ) - FROM tiles - ORDER BY zoom_level, tile_column, tile_row;", - [], - |row| row.get(0), - )?) 
- } - - pub(crate) fn open_with_hashes(&self, is_readonly: bool) -> MbtResult { - let flags = if is_readonly { - OpenFlags::SQLITE_OPEN_READ_ONLY - } else { - OpenFlags::default() - }; - let rusqlite_conn = RusqliteConnection::open_with_flags(self.filepath(), flags)?; - register_md5_function(&rusqlite_conn)?; - Ok(rusqlite_conn) - } - /// Perform `SQLite` internal integrity check pub async fn check_integrity( &self, @@ -465,6 +531,7 @@ impl Mbtiles { for<'e> &'e mut T: SqliteExecutor<'e>, { if integrity_check == IntegrityCheckType::Off { + info!("Skipping integrity check for {self}"); return Ok(()); } @@ -481,61 +548,53 @@ impl Mbtiles { if result.len() > 1 || result.get(0).ok_or(FailedIntegrityCheck( - self.filepath().to_string(), + self.filepath.to_string(), vec!["SQLite could not perform integrity check".to_string()], ))? != "ok" { return Err(FailedIntegrityCheck(self.filepath().to_string(), result)); } + info!("{integrity_check:?} integrity check passed for {self}"); Ok(()) } - pub async fn check_agg_tile_hashes(&self, conn: &mut T) -> MbtResult<()> + pub async fn check_agg_tiles_hashes(&self, conn: &mut T) -> MbtResult where for<'e> &'e mut T: SqliteExecutor<'e>, { let Some(stored) = self.get_agg_tiles_hash(&mut *conn).await? 
else { return Err(AggHashValueNotFound(self.filepath().to_string())); }; - - let computed = self.calc_agg_tiles_hash()?; + let computed = calc_agg_tiles_hash(&mut *conn).await?; if stored != computed { let file = self.filepath().to_string(); return Err(AggHashMismatch(computed, stored, file)); } - Ok(()) + info!("The agg_tiles_hashes={computed} has been verified for {self}"); + Ok(computed) } /// Compute new aggregate tiles hash and save it to the metadata table (if needed) - pub async fn update_agg_tiles_hash(&self, conn: &mut T) -> MbtResult<()> + pub async fn update_agg_tiles_hash(&self, conn: &mut T) -> MbtResult where for<'e> &'e mut T: SqliteExecutor<'e>, { let old_hash = self.get_agg_tiles_hash(&mut *conn).await?; - let hash = self.calc_agg_tiles_hash()?; + let hash = calc_agg_tiles_hash(&mut *conn).await?; if old_hash.as_ref() == Some(&hash) { - info!( - "agg_tiles_hash is already set to the correct value `{hash}` in {}", - self.filepath() - ); - Ok(()) + info!("Metadata value agg_tiles_hash is already set to the correct hash `{hash}` in {self}"); } else { if let Some(old_hash) = old_hash { - info!( - "Updating agg_tiles_hash from {old_hash} to {hash} in {}", - self.filepath() - ); + info!("Updating agg_tiles_hash from {old_hash} to {hash} in {self}"); } else { - info!( - "Initializing agg_tiles_hash to {hash} in {}", - self.filepath() - ); + info!("Adding a new metadata value agg_tiles_hash = {hash} in {self}"); } - self.set_metadata_value(&mut *conn, "agg_tiles_hash", Some(hash)) - .await + self.set_metadata_value(&mut *conn, AGG_TILES_HASH, Some(&hash)) + .await?; } + Ok(hash) } pub async fn check_each_tile_hash(&self, conn: &mut T) -> MbtResult<()> @@ -545,24 +604,24 @@ impl Mbtiles { // Note that hex() always returns upper-case HEX values let sql = match self.detect_type(&mut *conn).await? 
{ MbtType::Flat => { - println!("Skipping per-tile hash validation because this is a flat MBTiles file"); + info!("Skipping per-tile hash validation because this is a flat MBTiles file"); return Ok(()); } MbtType::FlatWithHash => { "SELECT expected, computed FROM ( SELECT upper(tile_hash) AS expected, - hex(md5(tile_data)) AS computed + md5_hex(tile_data) AS computed FROM tiles_with_hash ) AS t WHERE expected != computed LIMIT 1;" } - MbtType::Normalized => { + MbtType::Normalized { .. } => { "SELECT expected, computed FROM ( SELECT upper(tile_id) AS expected, - hex(md5(tile_data)) AS computed + md5_hex(tile_data) AS computed FROM images ) AS t WHERE expected != computed @@ -570,45 +629,95 @@ impl Mbtiles { } }; - self.open_with_hashes(true)? - .query_row_and_then(sql, [], |r| Ok((r.get(0)?, r.get(1)?))) - .optional()? - .map_or(Ok(()), |v: (String, String)| { - Err(IncorrectTileHash(self.filepath().to_string(), v.0, v.1)) - }) + query(sql) + .fetch_optional(&mut *conn) + .await? + .map_or(Ok(()), |v| { + Err(IncorrectTileHash( + self.filepath().to_string(), + v.get(0), + v.get(1), + )) + })?; + + info!("All tile hashes are valid for {self}"); + Ok(()) } } +/// Compute the hash of the combined tiles in the mbtiles file tiles table/view. +/// This should work on all mbtiles files perf `MBTiles` specification. +pub async fn calc_agg_tiles_hash(conn: &mut T) -> MbtResult +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + debug!("Calculating agg_tiles_hash"); + let query = query( + // The md5_concat func will return NULL if there are no rows in the tiles table. + // For our use case, we will treat it as an empty string, and hash that. + // `tile_data` values must be stored as a blob per MBTiles spec + // `md5` functions will fail if the value is not text/blob/null + // + // Note that ORDER BY controls the output ordering, which is important for the hash value, + // and having it at the top level would not order values properly. 
+ // See https://sqlite.org/forum/forumpost/228bb96e12a746ce + " +SELECT coalesce( + (SELECT md5_concat_hex( + cast(zoom_level AS text), + cast(tile_column AS text), + cast(tile_row AS text), + tile_data + ) + OVER (ORDER BY zoom_level, tile_column, tile_row ROWS + BETWEEN UNBOUNDED PRECEDING AND UNBOUNDED FOLLOWING) + FROM tiles + LIMIT 1), + md5_hex('') +); +", + ); + Ok(query.fetch_one(conn).await?.get::(0)) +} + +pub async fn attach_hash_fn(conn: &mut SqliteConnection) -> MbtResult<()> { + let mut handle_lock = conn.lock_handle().await?; + let handle = handle_lock.as_raw_handle().as_ptr(); + // Safety: we know that the handle is a SQLite connection is locked and is not used anywhere else. + // The registered functions will be dropped when SQLX drops DB connection. + let rc = unsafe { sqlite_hashes::rusqlite::Connection::from_handle(handle) }?; + register_md5_function(&rc)?; + Ok(()) +} + #[cfg(test)] mod tests { use std::collections::HashMap; use martin_tile_utils::Encoding; - use sqlx::{Connection, SqliteConnection}; + use sqlx::Executor as _; use tilejson::VectorLayer; use super::*; - async fn open(filepath: &str) -> (SqliteConnection, Mbtiles) { - let mbt = Mbtiles::new(filepath).unwrap(); - ( - SqliteConnection::connect(mbt.filepath()).await.unwrap(), - mbt, - ) + async fn open(filepath: &str) -> MbtResult<(SqliteConnection, Mbtiles)> { + let mbt = Mbtiles::new(filepath)?; + mbt.open().await.map(|conn| (conn, mbt)) } #[actix_rt::test] - async fn mbtiles_meta() { - let filepath = "../tests/fixtures/files/geography-class-jpg.mbtiles"; - let mbt = Mbtiles::new(filepath).unwrap(); + async fn mbtiles_meta() -> MbtResult<()> { + let filepath = "../tests/fixtures/mbtiles/geography-class-jpg.mbtiles"; + let mbt = Mbtiles::new(filepath)?; assert_eq!(mbt.filepath(), filepath); assert_eq!(mbt.filename(), "geography-class-jpg"); + Ok(()) } #[actix_rt::test] - async fn metadata_jpeg() { - let (mut conn, mbt) = 
open("../tests/fixtures/files/geography-class-jpg.mbtiles").await; - let metadata = mbt.get_metadata(&mut conn).await.unwrap(); + async fn metadata_jpeg() -> MbtResult<()> { + let (mut conn, mbt) = open("../tests/fixtures/mbtiles/geography-class-jpg.mbtiles").await?; + let metadata = mbt.get_metadata(&mut conn).await?; let tj = metadata.tilejson; assert_eq!(tj.description.unwrap(), "One of the example maps that comes with TileMill - a bright & colorful world map that blends retro and high-tech with its folded paper texture and interactive flag tooltips. "); @@ -620,12 +729,13 @@ mod tests { assert_eq!(tj.version.unwrap(), "1.0.0"); assert_eq!(metadata.id, "geography-class-jpg"); assert_eq!(metadata.tile_info, Format::Jpeg.into()); + Ok(()) } #[actix_rt::test] - async fn metadata_mvt() { - let (mut conn, mbt) = open("../tests/fixtures/files/world_cities.mbtiles").await; - let metadata = mbt.get_metadata(&mut conn).await.unwrap(); + async fn metadata_mvt() -> MbtResult<()> { + let (mut conn, mbt) = open("../tests/fixtures/mbtiles/world_cities.mbtiles").await?; + let metadata = mbt.get_metadata(&mut conn).await?; let tj = metadata.tilejson; assert_eq!(tj.maxzoom.unwrap(), 6); @@ -651,101 +761,92 @@ mod tests { TileInfo::new(Format::Mvt, Encoding::Gzip) ); assert_eq!(metadata.layer_type, Some("overlay".to_string())); + Ok(()) } #[actix_rt::test] - async fn metadata_get_key() { - let (mut conn, mbt) = open("../tests/fixtures/files/world_cities.mbtiles").await; - - let res = mbt.get_metadata_value(&mut conn, "bounds").await.unwrap(); - assert_eq!(res.unwrap(), "-123.123590,-37.818085,174.763027,59.352706"); - let res = mbt.get_metadata_value(&mut conn, "name").await.unwrap(); - assert_eq!(res.unwrap(), "Major cities from Natural Earth data"); - let res = mbt.get_metadata_value(&mut conn, "maxzoom").await.unwrap(); - assert_eq!(res.unwrap(), "6"); - let res = mbt.get_metadata_value(&mut conn, "nonexistent_key").await; - assert_eq!(res.unwrap(), None); - let res = 
mbt.get_metadata_value(&mut conn, "").await; - assert_eq!(res.unwrap(), None); + async fn metadata_get_key() -> MbtResult<()> { + let (mut conn, mbt) = open("../tests/fixtures/mbtiles/world_cities.mbtiles").await?; + + let res = mbt.get_metadata_value(&mut conn, "bounds").await?.unwrap(); + assert_eq!(res, "-123.123590,-37.818085,174.763027,59.352706"); + let res = mbt.get_metadata_value(&mut conn, "name").await?.unwrap(); + assert_eq!(res, "Major cities from Natural Earth data"); + let res = mbt.get_metadata_value(&mut conn, "maxzoom").await?.unwrap(); + assert_eq!(res, "6"); + let res = mbt.get_metadata_value(&mut conn, "nonexistent_key").await?; + assert_eq!(res, None); + let res = mbt.get_metadata_value(&mut conn, "").await?; + assert_eq!(res, None); + Ok(()) } #[actix_rt::test] - async fn metadata_set_key() { - let (mut conn, mbt) = open("file:metadata_set_key_mem_db?mode=memory&cache=shared").await; + async fn metadata_set_key() -> MbtResult<()> { + let (mut conn, mbt) = open("file:metadata_set_key_mem_db?mode=memory&cache=shared").await?; - query("CREATE TABLE metadata (name text NOT NULL PRIMARY KEY, value text);") - .execute(&mut conn) - .await - .unwrap(); + conn.execute("CREATE TABLE metadata (name text NOT NULL PRIMARY KEY, value text);") + .await?; - mbt.set_metadata_value(&mut conn, "bounds", Some("0.0, 0.0, 0.0, 0.0".to_string())) - .await - .unwrap(); + mbt.set_metadata_value(&mut conn, "bounds", Some("0.0, 0.0, 0.0, 0.0")) + .await?; assert_eq!( - mbt.get_metadata_value(&mut conn, "bounds") - .await - .unwrap() - .unwrap(), + mbt.get_metadata_value(&mut conn, "bounds").await?.unwrap(), "0.0, 0.0, 0.0, 0.0" ); mbt.set_metadata_value( &mut conn, "bounds", - Some("-123.123590,-37.818085,174.763027,59.352706".to_string()), + Some("-123.123590,-37.818085,174.763027,59.352706"), ) - .await - .unwrap(); + .await?; assert_eq!( - mbt.get_metadata_value(&mut conn, "bounds") - .await - .unwrap() - .unwrap(), + mbt.get_metadata_value(&mut conn, 
"bounds").await?.unwrap(), "-123.123590,-37.818085,174.763027,59.352706" ); - mbt.set_metadata_value(&mut conn, "bounds", None) - .await - .unwrap(); - assert_eq!( - mbt.get_metadata_value(&mut conn, "bounds").await.unwrap(), - None - ); + mbt.set_metadata_value(&mut conn, "bounds", None).await?; + assert_eq!(mbt.get_metadata_value(&mut conn, "bounds").await?, None); + + Ok(()) } #[actix_rt::test] - async fn detect_type() { - let (mut conn, mbt) = open("../tests/fixtures/files/world_cities.mbtiles").await; - let res = mbt.detect_type(&mut conn).await.unwrap(); + async fn detect_type() -> MbtResult<()> { + let (mut conn, mbt) = open("../tests/fixtures/mbtiles/world_cities.mbtiles").await?; + let res = mbt.detect_type(&mut conn).await?; assert_eq!(res, MbtType::Flat); - let (mut conn, mbt) = open("../tests/fixtures/files/zoomed_world_cities.mbtiles").await; - let res = mbt.detect_type(&mut conn).await.unwrap(); + let (mut conn, mbt) = open("../tests/fixtures/mbtiles/zoomed_world_cities.mbtiles").await?; + let res = mbt.detect_type(&mut conn).await?; assert_eq!(res, MbtType::FlatWithHash); - let (mut conn, mbt) = open("../tests/fixtures/files/geography-class-jpg.mbtiles").await; - let res = mbt.detect_type(&mut conn).await.unwrap(); - assert_eq!(res, MbtType::Normalized); + let (mut conn, mbt) = open("../tests/fixtures/mbtiles/geography-class-jpg.mbtiles").await?; + let res = mbt.detect_type(&mut conn).await?; + assert_eq!(res, MbtType::Normalized { hash_view: false }); - let (mut conn, mbt) = open(":memory:").await; + let (mut conn, mbt) = open(":memory:").await?; let res = mbt.detect_type(&mut conn).await; assert!(matches!(res, Err(MbtError::InvalidDataFormat(_)))); + + Ok(()) } #[actix_rt::test] - async fn validate_valid_file() { - let (mut conn, mbt) = open("../tests/fixtures/files/zoomed_world_cities.mbtiles").await; - + async fn validate_valid_file() -> MbtResult<()> { + let (mut conn, mbt) = open("../tests/fixtures/mbtiles/zoomed_world_cities.mbtiles").await?; 
mbt.check_integrity(&mut conn, IntegrityCheckType::Quick) - .await - .unwrap(); + .await?; + Ok(()) } #[actix_rt::test] - async fn validate_invalid_file() { + async fn validate_invalid_file() -> MbtResult<()> { let (mut conn, mbt) = - open("../tests/fixtures/files/invalid/invalid_zoomed_world_cities.mbtiles").await; - let result = mbt.check_agg_tile_hashes(&mut conn).await; + open("../tests/fixtures/files/invalid_zoomed_world_cities.mbtiles").await?; + let result = mbt.check_agg_tiles_hashes(&mut conn).await; assert!(matches!(result, Err(MbtError::AggHashMismatch(..)))); + Ok(()) } } diff --git a/mbtiles/src/patcher.rs b/mbtiles/src/patcher.rs new file mode 100644 index 000000000..94444a917 --- /dev/null +++ b/mbtiles/src/patcher.rs @@ -0,0 +1,189 @@ +use std::path::PathBuf; + +use log::{debug, info}; +use sqlx::query; + +use crate::queries::detach_db; +use crate::MbtType::{Flat, FlatWithHash, Normalized}; +use crate::{MbtResult, Mbtiles, AGG_TILES_HASH, AGG_TILES_HASH_IN_DIFF}; + +pub async fn apply_patch(src_file: PathBuf, patch_file: PathBuf) -> MbtResult<()> { + let src_mbt = Mbtiles::new(src_file)?; + let patch_mbt = Mbtiles::new(patch_file)?; + let patch_type = patch_mbt.open_and_detect_type().await?; + + let mut conn = src_mbt.open().await?; + let src_type = src_mbt.detect_type(&mut conn).await?; + patch_mbt.attach_to(&mut conn, "patchDb").await?; + + info!("Applying patch file {patch_mbt} ({patch_type}) to {src_mbt} ({src_type})"); + let select_from = if src_type == Flat { + "SELECT zoom_level, tile_column, tile_row, tile_data FROM patchDb.tiles" + } else { + match patch_type { + Flat => { + " + SELECT zoom_level, tile_column, tile_row, tile_data, md5_hex(tile_data) as hash + FROM patchDb.tiles" + } + FlatWithHash => { + " + SELECT zoom_level, tile_column, tile_row, tile_data, tile_hash AS hash + FROM patchDb.tiles_with_hash" + } + Normalized { .. 
} => { + " + SELECT zoom_level, tile_column, tile_row, tile_data, map.tile_id AS hash + FROM patchDb.map LEFT JOIN patchDb.images + ON patchDb.map.tile_id = patchDb.images.tile_id" + } + } + } + .to_string(); + + let (main_table, insert_sql) = match src_type { + Flat => ( + "tiles", + vec![format!( + " + INSERT OR REPLACE INTO tiles (zoom_level, tile_column, tile_row, tile_data) + {select_from}" + )], + ), + FlatWithHash => ( + "tiles_with_hash", + vec![format!( + " + INSERT OR REPLACE INTO tiles_with_hash (zoom_level, tile_column, tile_row, tile_data, tile_hash) + {select_from}" + )], + ), + Normalized { .. } => ( + "map", + vec![ + format!( + " + INSERT OR REPLACE INTO map (zoom_level, tile_column, tile_row, tile_id) + SELECT zoom_level, tile_column, tile_row, hash as tile_id + FROM ({select_from})" + ), + format!( + " + INSERT OR REPLACE INTO images (tile_id, tile_data) + SELECT hash as tile_id, tile_data + FROM ({select_from})" + ), + ], + ), + }; + + for statement in insert_sql { + query(&format!("{statement} WHERE tile_data NOTNULL")) + .execute(&mut conn) + .await?; + } + + query(&format!( + " + DELETE FROM {main_table} + WHERE (zoom_level, tile_column, tile_row) IN ( + SELECT zoom_level, tile_column, tile_row FROM ({select_from} WHERE tile_data ISNULL) + )" + )) + .execute(&mut conn) + .await?; + + if src_type.is_normalized() { + debug!("Removing unused tiles from the images table (normalized schema)"); + query("DELETE FROM images WHERE tile_id NOT IN (SELECT tile_id FROM map)") + .execute(&mut conn) + .await?; + } + + // Copy metadata from patchDb to the destination file, replacing existing values + // Convert 'agg_tiles_hash_in_patch' into 'agg_tiles_hash' + // Delete metadata entries if the value is NULL in patchDb + query(&format!( + " + INSERT OR REPLACE INTO metadata (name, value) + SELECT IIF(name = '{AGG_TILES_HASH_IN_DIFF}', '{AGG_TILES_HASH}', name) as name, + value + FROM patchDb.metadata + WHERE name NOTNULL AND name != '{AGG_TILES_HASH}';" + )) 
+ .execute(&mut conn) + .await?; + + query( + " + DELETE FROM metadata + WHERE name IN (SELECT name FROM patchDb.metadata WHERE value ISNULL);", + ) + .execute(&mut conn) + .await?; + + detach_db(&mut conn, "patchDb").await +} + +#[cfg(test)] +mod tests { + use sqlx::Executor as _; + + use super::*; + use crate::MbtilesCopier; + + #[actix_rt::test] + async fn apply_flat_patch_file() -> MbtResult<()> { + // Copy the src file to an in-memory DB + let src_file = PathBuf::from("../tests/fixtures/mbtiles/world_cities.mbtiles"); + let src = PathBuf::from("file:apply_flat_diff_file_mem_db?mode=memory&cache=shared"); + + let mut src_conn = MbtilesCopier::new(src_file.clone(), src.clone()) + .run() + .await?; + + // Apply patch to the src data in in-memory DB + let patch_file = PathBuf::from("../tests/fixtures/mbtiles/world_cities_diff.mbtiles"); + apply_patch(src, patch_file).await?; + + // Verify the data is the same as the file the patch was generated from + Mbtiles::new("../tests/fixtures/mbtiles/world_cities_modified.mbtiles")? + .attach_to(&mut src_conn, "testOtherDb") + .await?; + + assert!(src_conn + .fetch_optional("SELECT * FROM tiles EXCEPT SELECT * FROM testOtherDb.tiles;") + .await? + .is_none()); + + Ok(()) + } + + #[actix_rt::test] + async fn apply_normalized_patch_file() -> MbtResult<()> { + // Copy the src file to an in-memory DB + let src_file = PathBuf::from("../tests/fixtures/mbtiles/geography-class-jpg.mbtiles"); + let src = PathBuf::from("file:apply_normalized_diff_file_mem_db?mode=memory&cache=shared"); + + let mut src_conn = MbtilesCopier::new(src_file.clone(), src.clone()) + .run() + .await?; + + // Apply patch to the src data in in-memory DB + let patch_file = + PathBuf::from("../tests/fixtures/mbtiles/geography-class-jpg-diff.mbtiles"); + apply_patch(src, patch_file).await?; + + // Verify the data is the same as the file the patch was generated from + Mbtiles::new("../tests/fixtures/mbtiles/geography-class-jpg-modified.mbtiles")? 
+ .attach_to(&mut src_conn, "testOtherDb") + .await?; + + assert!(src_conn + .fetch_optional("SELECT * FROM tiles EXCEPT SELECT * FROM testOtherDb.tiles;") + .await? + .is_none()); + + Ok(()) + } +} diff --git a/martin-mbtiles/src/mbtiles_pool.rs b/mbtiles/src/pool.rs similarity index 100% rename from martin-mbtiles/src/mbtiles_pool.rs rename to mbtiles/src/pool.rs diff --git a/mbtiles/src/queries.rs b/mbtiles/src/queries.rs new file mode 100644 index 000000000..c41531ff5 --- /dev/null +++ b/mbtiles/src/queries.rs @@ -0,0 +1,283 @@ +use log::debug; +use sqlx::{query, Executor as _, SqliteExecutor}; + +use crate::errors::MbtResult; + +/// Returns true if the database is empty (no tables/indexes/...) +pub async fn is_empty_database(conn: &mut T) -> MbtResult +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + Ok(query!("SELECT 1 as has_rows FROM sqlite_schema LIMIT 1") + .fetch_optional(&mut *conn) + .await? + .is_none()) +} + +pub async fn is_normalized_tables_type(conn: &mut T) -> MbtResult +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + let sql = query!( + "SELECT ( + -- Has a 'map' table + SELECT COUNT(*) = 1 + FROM sqlite_master + WHERE name = 'map' + AND type = 'table' + -- + ) AND ( + -- 'map' table's columns and their types are as expected: + -- 4 columns (zoom_level, tile_column, tile_row, tile_id). + -- The order is not important + SELECT COUNT(*) = 4 + FROM pragma_table_info('map') + WHERE ((name = 'zoom_level' AND type = 'INTEGER') + OR (name = 'tile_column' AND type = 'INTEGER') + OR (name = 'tile_row' AND type = 'INTEGER') + OR (name = 'tile_id' AND type = 'TEXT')) + -- + ) AND ( + -- Has a 'images' table + SELECT COUNT(*) = 1 + FROM sqlite_master + WHERE name = 'images' + AND type = 'table' + -- + ) AND ( + -- 'images' table's columns and their types are as expected: + -- 2 columns (tile_id, tile_data). 
+ -- The order is not important + SELECT COUNT(*) = 2 + FROM pragma_table_info('images') + WHERE ((name = 'tile_id' AND type = 'TEXT') + OR (name = 'tile_data' AND type = 'BLOB')) + -- + ) AS is_valid;" + ); + + Ok(sql + .fetch_one(&mut *conn) + .await? + .is_valid + .unwrap_or_default() + == 1) +} + +/// Check if MBTiles has a table or a view named 'tiles_with_hash' with needed fields +pub async fn has_tiles_with_hash(conn: &mut T) -> MbtResult +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + let sql = query!( + "SELECT ( + -- 'tiles_with_hash' table or view columns and their types are as expected: + -- 5 columns (zoom_level, tile_column, tile_row, tile_data, tile_hash). + -- The order is not important + SELECT COUNT(*) = 5 + FROM pragma_table_info('tiles_with_hash') + WHERE ((name = 'zoom_level' AND type = 'INTEGER') + OR (name = 'tile_column' AND type = 'INTEGER') + OR (name = 'tile_row' AND type = 'INTEGER') + OR (name = 'tile_data' AND type = 'BLOB') + OR (name = 'tile_hash' AND type = 'TEXT')) + -- + ) as is_valid;" + ); + + Ok(sql + .fetch_one(&mut *conn) + .await? + .is_valid + .unwrap_or_default() + == 1) +} + +pub async fn is_flat_with_hash_tables_type(conn: &mut T) -> MbtResult +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + let sql = query!( + "SELECT ( + -- Has a 'tiles_with_hash' table + SELECT COUNT(*) = 1 + FROM sqlite_master + WHERE name = 'tiles_with_hash' + AND type = 'table' + -- + ) as is_valid;" + ); + + let is_valid = sql.fetch_one(&mut *conn).await?.is_valid; + + Ok(is_valid.unwrap_or_default() == 1 && has_tiles_with_hash(&mut *conn).await?) 
+} + +pub async fn is_flat_tables_type(conn: &mut T) -> MbtResult +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + let sql = query!( + "SELECT ( + -- Has a 'tiles' table + SELECT COUNT(*) = 1 + FROM sqlite_master + WHERE name = 'tiles' + AND type = 'table' + -- + ) AND ( + -- 'tiles' table's columns and their types are as expected: + -- 4 columns (zoom_level, tile_column, tile_row, tile_data). + -- The order is not important + SELECT COUNT(*) = 4 + FROM pragma_table_info('tiles') + WHERE ((name = 'zoom_level' AND type = 'INTEGER') + OR (name = 'tile_column' AND type = 'INTEGER') + OR (name = 'tile_row' AND type = 'INTEGER') + OR (name = 'tile_data' AND type = 'BLOB')) + -- + ) as is_valid;" + ); + + Ok(sql + .fetch_one(&mut *conn) + .await? + .is_valid + .unwrap_or_default() + == 1) +} + +pub async fn create_metadata_table(conn: &mut T) -> MbtResult<()> +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + debug!("Creating metadata table if it doesn't already exist"); + conn.execute( + "CREATE TABLE IF NOT EXISTS metadata ( + name text NOT NULL PRIMARY KEY, + value text);", + ) + .await?; + + Ok(()) +} + +pub async fn create_flat_tables(conn: &mut T) -> MbtResult<()> +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + create_metadata_table(&mut *conn).await?; + + debug!("Creating if needed flat table: tiles(z,x,y,data)"); + conn.execute( + "CREATE TABLE IF NOT EXISTS tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row));", + ) + .await?; + + Ok(()) +} + +pub async fn create_flat_with_hash_tables(conn: &mut T) -> MbtResult<()> +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + create_metadata_table(&mut *conn).await?; + + debug!("Creating if needed flat-with-hash table: tiles_with_hash(z,x,y,data,hash)"); + conn.execute( + "CREATE TABLE IF NOT EXISTS tiles_with_hash ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer 
NOT NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, tile_column, tile_row));", + ) + .await?; + + debug!("Creating if needed tiles view for flat-with-hash"); + conn.execute( + "CREATE VIEW IF NOT EXISTS tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash;", + ) + .await?; + + Ok(()) +} + +pub async fn create_normalized_tables(conn: &mut T) -> MbtResult<()> +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + create_metadata_table(&mut *conn).await?; + + debug!("Creating if needed normalized table: map(z,x,y,id)"); + conn.execute( + "CREATE TABLE IF NOT EXISTS map ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, tile_column, tile_row));", + ) + .await?; + + debug!("Creating if needed normalized table: images(id,data)"); + conn.execute( + "CREATE TABLE IF NOT EXISTS images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob);", + ) + .await?; + + debug!("Creating if needed tiles view for flat-with-hash"); + conn.execute( + "CREATE VIEW IF NOT EXISTS tiles AS + SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id;", + ) + .await?; + + Ok(()) +} + +pub async fn create_tiles_with_hash_view(conn: &mut T) -> MbtResult<()> +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + debug!("Creating if needed tiles_with_hash view for normalized map+images structure"); + conn.execute( + "CREATE VIEW IF NOT EXISTS tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id", + ) + .await?; + + Ok(()) +} + +pub async fn detach_db(conn: &mut T, name: &str) -> MbtResult<()> +where + for<'e> &'e mut T: SqliteExecutor<'e>, +{ + 
debug!("Detaching {name}"); + query(&format!("DETACH DATABASE {name}")) + .execute(conn) + .await?; + Ok(()) +} diff --git a/mbtiles/tests/mbtiles.rs b/mbtiles/tests/mbtiles.rs new file mode 100644 index 000000000..e15031004 --- /dev/null +++ b/mbtiles/tests/mbtiles.rs @@ -0,0 +1,485 @@ +use std::collections::HashMap; +use std::path::PathBuf; +use std::str::from_utf8; + +use ctor::ctor; +use insta::{allow_duplicates, assert_display_snapshot}; +use log::info; +use mbtiles::IntegrityCheckType::Off; +use mbtiles::MbtTypeCli::{Flat, FlatWithHash, Normalized}; +use mbtiles::{apply_patch, create_flat_tables, MbtResult, MbtTypeCli, Mbtiles, MbtilesCopier}; +use pretty_assertions::assert_eq as pretty_assert_eq; +use rstest::{fixture, rstest}; +use serde::Serialize; +use sqlx::{query, query_as, Executor as _, Row, SqliteConnection}; + +const TILES_V1: &str = " + INSERT INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES + --(z, x, y, data) -- rules: keep if x=0, edit if x=1, remove if x=2 + (5, 0, 0, cast('same' as blob)) + , (5, 0, 1, cast('' as blob)) -- empty tile, keep + , (5, 1, 1, cast('edit-v1' as blob)) + , (5, 1, 2, cast('' as blob)) -- empty tile, edit + , (5, 1, 3, cast('non-empty' as blob)) -- non empty tile to edit + , (5, 2, 2, cast('remove' as blob)) + , (5, 2, 3, cast('' as blob)) -- empty tile, remove + , (6, 0, 3, cast('same' as blob)) + , (6, 1, 4, cast('edit-v1' as blob)) + , (6, 0, 5, cast('1-keep-1-rm' as blob)) + , (6, 2, 6, cast('1-keep-1-rm' as blob)) + ;"; + +const TILES_V2: &str = " + INSERT INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES + (5, 0, 0, cast('same' as blob)) -- no changes + , (5, 0, 1, cast('' as blob)) -- no changes, empty tile + , (5, 1, 1, cast('edit-v2' as blob)) -- edited in-place + , (5, 1, 2, cast('not-empty' as blob)) -- edited in-place, replaced empty with non-empty + , (5, 1, 3, cast('' as blob)) -- edited in-place, replaced non-empty with empty + -- , (5, 2, 2, cast('remove' as blob)) -- this 
row is removed + -- , (5, 2, 3, cast('' as blob)) -- empty tile, removed + , (6, 0, 3, cast('same' as blob)) -- no changes, same content as 5/0/0 + , (6, 1, 4, cast('edit-v2a' as blob)) -- edited, used to be same as 5/1/1 + , (6, 0, 5, cast('1-keep-1-rm' as blob)) -- this row is kept (same content as next) + -- , (6, 2, 6, cast('1-keep-1-rm' as blob)) -- this row is removed + , (5, 3, 7, cast('new' as blob)) -- this row is added, dup value + , (5, 3, 8, cast('new' as blob)) -- this row is added, dup value + + -- Expected delta: + -- 5/1/1 edit + -- 5/1/2 edit + -- 5/1/3 edit + -- 5/2/2 remove + -- 5/2/3 remove + -- 5/3/7 add + -- 5/3/8 add + -- 6/1/4 edit + -- 6/2/6 remove + ;"; + +const METADATA_V1: &str = " + INSERT INTO metadata (name, value) VALUES + ('md-same', 'value - same') + , ('md-edit', 'value - v1') + , ('md-remove', 'value - remove') + ;"; + +const METADATA_V2: &str = " + INSERT INTO metadata (name, value) VALUES + ('md-same', 'value - same') + , ('md-edit', 'value - v2') + , ('md-new', 'value - new') + ;"; + +#[ctor] +fn init() { + let _ = env_logger::builder().is_test(true).try_init(); +} + +fn path(mbt: &Mbtiles) -> PathBuf { + PathBuf::from(mbt.filepath()) +} + +fn copier(src: &Mbtiles, dst: &Mbtiles) -> MbtilesCopier { + MbtilesCopier::new(path(src), path(dst)) +} + +fn shorten(v: MbtTypeCli) -> &'static str { + match v { + Flat => "flat", + FlatWithHash => "hash", + Normalized => "norm", + } +} + +async fn open(file: &str) -> MbtResult<(Mbtiles, SqliteConnection)> { + let mbtiles = Mbtiles::new(file)?; + let conn = mbtiles.open().await?; + Ok((mbtiles, conn)) +} + +macro_rules! 
open { + ($function:ident, $($arg:tt)*) => { + open!(@"", $function, $($arg)*) + }; + (@$extra:literal, $function:tt, $($arg:tt)*) => {{ + let file = format!("file:{}_{}{}?mode=memory&cache=shared", stringify!($function), format_args!($($arg)*), $extra); + open(&file).await.unwrap() + }}; +} + +/// Create a new SQLite file of given type without agg_tiles_hash metadata value +macro_rules! new_file_no_hash { + ($function:ident, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $($arg:tt)*) => {{ + new_file!(@true, $function, $dst_type_cli, $sql_meta, $sql_data, $($arg)*) + }}; +} + +/// Create a new SQLite file of type $dst_type_cli with the given metadata and tiles +macro_rules! new_file { + ($function:ident, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $($arg:tt)*) => { + new_file!(@false, $function, $dst_type_cli, $sql_meta, $sql_data, $($arg)*) + }; + + (@$skip_agg:expr, $function:tt, $dst_type_cli:expr, $sql_meta:expr, $sql_data:expr, $($arg:tt)*) => {{ + let (tmp_mbt, mut cn_tmp) = open!(@"temp", $function, $($arg)*); + create_flat_tables(&mut cn_tmp).await.unwrap(); + cn_tmp.execute($sql_data).await.unwrap(); + cn_tmp.execute($sql_meta).await.unwrap(); + + let (dst_mbt, cn_dst) = open!($function, $($arg)*); + let mut opt = copier(&tmp_mbt, &dst_mbt); + opt.dst_type_cli = Some($dst_type_cli); + opt.skip_agg_tiles_hash = $skip_agg; + opt.run().await.unwrap(); + + (dst_mbt, cn_dst) + }}; +} + +macro_rules! 
assert_snapshot { + ($actual_value:expr, $($arg:tt)*) => {{ + let mut settings = insta::Settings::clone_current(); + settings.set_snapshot_suffix(format!($($arg)*)); + let actual_value = &$actual_value; + settings.bind(|| insta::assert_toml_snapshot!(actual_value)); + }}; +} + +#[derive(Default)] +struct Databases( + HashMap<(&'static str, MbtTypeCli), (Vec, Mbtiles, SqliteConnection)>, +); + +impl Databases { + fn add( + &mut self, + name: &'static str, + typ: MbtTypeCli, + dump: Vec, + mbtiles: Mbtiles, + conn: SqliteConnection, + ) { + self.0.insert((name, typ), (dump, mbtiles, conn)); + } + fn dump(&self, name: &'static str, typ: MbtTypeCli) -> &Vec { + &self.0.get(&(name, typ)).unwrap().0 + } + fn mbtiles(&self, name: &'static str, typ: MbtTypeCli) -> &Mbtiles { + &self.0.get(&(name, typ)).unwrap().1 + } +} + +/// Generate a set of databases for testing, and validate them against snapshot files. +/// These dbs will be used by other tests to check against in various conditions. +#[fixture] +#[once] +fn databases() -> Databases { + futures::executor::block_on(async { + let mut result = Databases::default(); + for &mbt_typ in &[Flat, FlatWithHash, Normalized] { + let typ = shorten(mbt_typ); + let (raw_mbt, mut raw_cn) = new_file_no_hash!( + databases, + mbt_typ, + METADATA_V1, + TILES_V1, + "{typ}__v1-no-hash" + ); + let dmp = dump(&mut raw_cn).await.unwrap(); + assert_snapshot!(&dmp, "{typ}__v1-no-hash"); + result.add("v1_no_hash", mbt_typ, dmp, raw_mbt, raw_cn); + + let (v1_mbt, mut v1_cn) = open!(databases, "{typ}__v1"); + let raw_mbt = result.mbtiles("v1_no_hash", mbt_typ); + copier(raw_mbt, &v1_mbt).run().await.unwrap(); + let dmp = dump(&mut v1_cn).await.unwrap(); + assert_snapshot!(&dmp, "{typ}__v1"); + let hash = v1_mbt.validate(Off, false).await.unwrap(); + allow_duplicates! 
{ + assert_display_snapshot!(hash, @"096A8399D486CF443A5DF0CEC1AD8BB2"); + } + result.add("v1", mbt_typ, dmp, v1_mbt, v1_cn); + + let (v2_mbt, mut v2_cn) = + new_file!(databases, mbt_typ, METADATA_V2, TILES_V2, "{typ}__v2"); + let dmp = dump(&mut v2_cn).await.unwrap(); + assert_snapshot!(&dmp, "{typ}__v2"); + let hash = v2_mbt.validate(Off, false).await.unwrap(); + allow_duplicates! { + assert_display_snapshot!(hash, @"FE0D3090E8B4E89F2C755C08E8D76BEA"); + } + result.add("v2", mbt_typ, dmp, v2_mbt, v2_cn); + + let (dif_mbt, mut dif_cn) = open!(databases, "{typ}__dif"); + let v1_mbt = result.mbtiles("v1", mbt_typ); + let mut opt = copier(v1_mbt, &dif_mbt); + let v2_mbt = result.mbtiles("v2", mbt_typ); + opt.diff_with_file = Some(path(v2_mbt)); + opt.run().await.unwrap(); + let dmp = dump(&mut dif_cn).await.unwrap(); + assert_snapshot!(&dmp, "{typ}__dif"); + let hash = dif_mbt.validate(Off, false).await.unwrap(); + allow_duplicates! { + assert_display_snapshot!(hash, @"B86122579EDCDD4C51F3910894FCC1A1"); + } + result.add("dif", mbt_typ, dmp, dif_mbt, dif_cn); + } + result + }) +} + +#[rstest] +#[trace] +#[actix_rt::test] +async fn convert( + #[values(Flat, FlatWithHash, Normalized)] frm_type: MbtTypeCli, + #[values(Flat, FlatWithHash, Normalized)] dst_type: MbtTypeCli, + #[notrace] databases: &Databases, +) -> MbtResult<()> { + let (frm, to) = (shorten(frm_type), shorten(dst_type)); + let mem = Mbtiles::new(":memory:")?; + let (frm_mbt, _frm_cn) = new_file!(convert, frm_type, METADATA_V1, TILES_V1, "{frm}-{to}"); + + let mut opt = copier(&frm_mbt, &mem); + opt.dst_type_cli = Some(dst_type); + let dmp = dump(&mut opt.run().await?).await?; + pretty_assert_eq!(databases.dump("v1", dst_type), &dmp); + + let mut opt = copier(&frm_mbt, &mem); + opt.dst_type_cli = Some(dst_type); + opt.zoom_levels.insert(6); + let z6only = dump(&mut opt.run().await?).await?; + assert_snapshot!(z6only, "v1__z6__{frm}-{to}"); + + let mut opt = copier(&frm_mbt, &mem); + opt.dst_type_cli = 
Some(dst_type); + opt.min_zoom = Some(6); + pretty_assert_eq!(&z6only, &dump(&mut opt.run().await?).await?); + + let mut opt = copier(&frm_mbt, &mem); + opt.dst_type_cli = Some(dst_type); + opt.min_zoom = Some(6); + opt.max_zoom = Some(6); + pretty_assert_eq!(&z6only, &dump(&mut opt.run().await?).await?); + + Ok(()) +} + +#[rstest] +#[trace] +#[actix_rt::test] +async fn diff_and_patch( + #[values(Flat, FlatWithHash, Normalized)] v1_type: MbtTypeCli, + #[values(Flat, FlatWithHash, Normalized)] v2_type: MbtTypeCli, + #[values(None, Some(Flat), Some(FlatWithHash), Some(Normalized))] dif_type: Option, + #[notrace] databases: &Databases, +) -> MbtResult<()> { + let (v1, v2) = (shorten(v1_type), shorten(v2_type)); + let dif = dif_type.map(shorten).unwrap_or("dflt"); + let prefix = format!("{v2}-{v1}={dif}"); + + let v1_mbt = databases.mbtiles("v1", v1_type); + let v2_mbt = databases.mbtiles("v2", v2_type); + let (dif_mbt, mut dif_cn) = open!(diff_and_patchdiff_and_patch, "{prefix}__dif"); + + info!("TEST: Compare v1 with v2, and copy anything that's different (i.e. mathematically: v2-v1=diff)"); + let mut opt = copier(v1_mbt, &dif_mbt); + opt.diff_with_file = Some(path(v2_mbt)); + if let Some(dif_type) = dif_type { + opt.dst_type_cli = Some(dif_type); + } + opt.run().await?; + pretty_assert_eq!( + &dump(&mut dif_cn).await?, + databases.dump("dif", dif_type.unwrap_or(v1_type)) + ); + + for target_type in &[Flat, FlatWithHash, Normalized] { + let trg = shorten(*target_type); + let prefix = format!("{prefix}__to__{trg}"); + let expected_v2 = databases.dump("v2", *target_type); + + info!("TEST: Applying the difference (v2-v1=diff) to v1, should get v2"); + let (tar1_mbt, mut tar1_cn) = new_file!( + diff_and_patch, + *target_type, + METADATA_V1, + TILES_V1, + "{prefix}__v1" + ); + apply_patch(path(&tar1_mbt), path(&dif_mbt)).await?; + let hash_v1 = tar1_mbt.validate(Off, false).await?; + allow_duplicates! 
{ + assert_display_snapshot!(hash_v1, @"FE0D3090E8B4E89F2C755C08E8D76BEA"); + } + let dmp = dump(&mut tar1_cn).await?; + pretty_assert_eq!(&dmp, expected_v2); + + info!("TEST: Applying the difference (v2-v1=diff) to v2, should not modify it"); + let (tar2_mbt, mut tar2_cn) = + new_file! {diff_and_patch, *target_type, METADATA_V2, TILES_V2, "{prefix}__v2"}; + apply_patch(path(&tar2_mbt), path(&dif_mbt)).await?; + let hash_v2 = tar2_mbt.validate(Off, false).await?; + allow_duplicates! { + assert_display_snapshot!(hash_v2, @"FE0D3090E8B4E89F2C755C08E8D76BEA"); + } + let dmp = dump(&mut tar2_cn).await?; + pretty_assert_eq!(&dmp, expected_v2); + } + + Ok(()) +} + +#[rstest] +#[trace] +#[actix_rt::test] +async fn patch_on_copy( + #[values(Flat, FlatWithHash, Normalized)] v1_type: MbtTypeCli, + #[values(Flat, FlatWithHash, Normalized)] dif_type: MbtTypeCli, + #[values(None, Some(Flat), Some(FlatWithHash), Some(Normalized))] v2_type: Option, + #[notrace] databases: &Databases, +) -> MbtResult<()> { + let (v1, dif) = (shorten(v1_type), shorten(dif_type)); + let v2 = v2_type.map(shorten).unwrap_or("dflt"); + let prefix = format!("{v1}+{dif}={v2}"); + + let v1_mbt = databases.mbtiles("v1", v1_type); + let dif_mbt = databases.mbtiles("dif", dif_type); + let (v2_mbt, mut v2_cn) = open!(patch_on_copy, "{prefix}__v2"); + + info!("TEST: Compare v1 with v2, and copy anything that's different (i.e. 
mathematically: v2-v1=diff)"); + let mut opt = copier(v1_mbt, &v2_mbt); + opt.apply_patch = Some(path(dif_mbt)); + if let Some(v2_type) = v2_type { + opt.dst_type_cli = Some(v2_type); + } + opt.run().await?; + pretty_assert_eq!( + &dump(&mut v2_cn).await?, + databases.dump("v2", v2_type.unwrap_or(v1_type)) + ); + + Ok(()) +} + +/// A simple tester to run specific values +#[actix_rt::test] +#[ignore] +async fn test_one() { + let src_type = FlatWithHash; + let dif_type = FlatWithHash; + // let dst_type = Some(FlatWithHash); + let dst_type = None; + let db = databases(); + + diff_and_patch(src_type, dif_type, dst_type, &db) + .await + .unwrap(); + patch_on_copy(src_type, dif_type, dst_type, &db) + .await + .unwrap(); + panic!("ALWAYS FAIL - this test is for debugging only, and should be disabled"); +} + +#[derive(Debug, sqlx::FromRow, Serialize, PartialEq)] +struct SqliteEntry { + pub r#type: Option, + pub tbl_name: Option, + pub sql: Option, + #[sqlx(skip)] + pub values: Option>, +} + +async fn dump(conn: &mut SqliteConnection) -> MbtResult> { + let mut result = Vec::new(); + + let schema: Vec = query_as( + "SELECT type, tbl_name, sql + FROM sqlite_schema + ORDER BY type != 'table', type, tbl_name", + ) + .fetch_all(&mut *conn) + .await?; + + for mut entry in schema { + let tbl = match (&entry.r#type, &entry.tbl_name) { + (Some(typ), Some(tbl)) if typ == "table" => tbl, + _ => { + result.push(entry); + continue; + } + }; + + let sql = format!("PRAGMA table_info({tbl})"); + let columns: Vec<_> = query(&sql) + .fetch_all(&mut *conn) + .await? 
+ .into_iter() + .map(|row| { + let cid: i32 = row.get(0); + let typ: String = row.get(2); + (cid as usize, typ) + }) + .collect(); + + let sql = format!("SELECT * FROM {tbl}"); + let rows = query(&sql).fetch_all(&mut *conn).await?; + let mut values = rows + .iter() + .map(|row| { + let val = columns + .iter() + .map(|(idx, typ)| { + // use sqlx::ValueRef as _; + // let raw = row.try_get_raw(*idx).unwrap(); + // if raw.is_null() { + // return "NULL".to_string(); + // } + // if let Ok(v) = row.try_get::(idx) { + // return format!(r#""{v}""#); + // } + // if let Ok(v) = row.try_get::, _>(idx) { + // return format!("blob({})", from_utf8(&v).unwrap()); + // } + // if let Ok(v) = row.try_get::(idx) { + // return v.to_string(); + // } + // if let Ok(v) = row.try_get::(idx) { + // return v.to_string(); + // } + // panic!("Unknown column type: {typ}"); + (match typ.as_str() { + "INTEGER" => row.get::, _>(idx).map(|v| v.to_string()), + "REAL" => row.get::, _>(idx).map(|v| v.to_string()), + "TEXT" => row + .get::, _>(idx) + .map(|v| format!(r#""{v}""#)), + "BLOB" => row + .get::>, _>(idx) + .map(|v| format!("blob({})", from_utf8(&v).unwrap())), + _ => panic!("Unknown column type: {typ}"), + }) + .unwrap_or("NULL".to_string()) + }) + .collect::>() + .join(", "); + format!("( {val} )") + }) + .collect::>(); + + values.sort(); + entry.values = Some(values); + result.push(entry); + } + + Ok(result) +} + +#[allow(dead_code)] +async fn save_to_file(source_mbt: &Mbtiles, path: &str) -> MbtResult<()> { + let mut opt = copier(source_mbt, &Mbtiles::new(path)?); + opt.skip_agg_tiles_hash = true; + opt.run().await?; + Ok(()) +} diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-flat.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-flat.snap new file mode 100644 index 000000000..4570d4c73 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-flat.snap @@ -0,0 +1,42 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- 
+[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles' +sql = ''' +CREATE TABLE tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, blob(same) )', + '( 6, 0, 5, blob(1-keep-1-rm) )', + '( 6, 1, 4, blob(edit-v1) )', + '( 6, 2, 6, blob(1-keep-1-rm) )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-hash.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-hash.snap new file mode 100644 index 000000000..76f3e7040 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-hash.snap @@ -0,0 +1,50 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE TABLE tiles_with_hash ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, blob(edit-v1), "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, 
blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles_with_hash' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash''' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-norm.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-norm.snap new file mode 100644 index 000000000..bce869b3b --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__flat-norm.snap @@ -0,0 +1,85 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'images' +sql = ''' +CREATE TABLE images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob)''' +values = [ + '( "51037A4A37730F52C8732586D3AAA316", blob(same) )', + '( "535A5575B48444EDEB926815AB26EC9B", blob(1-keep-1-rm) )', + '( "EFE0AE5FD114DE99855BC2838BE97E1D", blob(edit-v1) )', +] + +[[]] +type = 'table' +tbl_name = 'map' +sql = ''' +CREATE TABLE map ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'index' +tbl_name = 'images' + +[[]] +type = 'index' +tbl_name = 'map' + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + 
SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id''' + +[[]] +type = 'view' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id''' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-flat.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-flat.snap new file mode 100644 index 000000000..4570d4c73 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-flat.snap @@ -0,0 +1,42 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles' +sql = ''' +CREATE TABLE tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, blob(same) )', + '( 6, 0, 5, blob(1-keep-1-rm) )', + '( 6, 1, 4, blob(edit-v1) )', + '( 6, 2, 6, blob(1-keep-1-rm) )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-hash.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-hash.snap new file mode 100644 index 000000000..76f3e7040 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-hash.snap @@ -0,0 +1,50 @@ +--- +source: 
mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE TABLE tiles_with_hash ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, blob(edit-v1), "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles_with_hash' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash''' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-norm.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-norm.snap new file mode 100644 index 000000000..bce869b3b --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__hash-norm.snap @@ -0,0 +1,85 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'images' +sql = ''' +CREATE TABLE images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob)''' +values = [ + '( "51037A4A37730F52C8732586D3AAA316", blob(same) )', + '( "535A5575B48444EDEB926815AB26EC9B", blob(1-keep-1-rm) )', + '( "EFE0AE5FD114DE99855BC2838BE97E1D", blob(edit-v1) )', +] + +[[]] +type = 'table' +tbl_name = 'map' +sql = ''' +CREATE TABLE map ( + zoom_level integer NOT NULL, + 
tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'index' +tbl_name = 'images' + +[[]] +type = 'index' +tbl_name = 'map' + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id''' + +[[]] +type = 'view' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id''' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-flat.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-flat.snap new file mode 100644 index 000000000..4570d4c73 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-flat.snap @@ -0,0 +1,42 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', 
+ '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles' +sql = ''' +CREATE TABLE tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, blob(same) )', + '( 6, 0, 5, blob(1-keep-1-rm) )', + '( 6, 1, 4, blob(edit-v1) )', + '( 6, 2, 6, blob(1-keep-1-rm) )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-hash.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-hash.snap new file mode 100644 index 000000000..76f3e7040 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-hash.snap @@ -0,0 +1,50 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE TABLE tiles_with_hash ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, blob(edit-v1), "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles_with_hash' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT 
zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash''' diff --git a/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-norm.snap b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-norm.snap new file mode 100644 index 000000000..bce869b3b --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__convert@v1__z6__norm-norm.snap @@ -0,0 +1,85 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'images' +sql = ''' +CREATE TABLE images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob)''' +values = [ + '( "51037A4A37730F52C8732586D3AAA316", blob(same) )', + '( "535A5575B48444EDEB926815AB26EC9B", blob(1-keep-1-rm) )', + '( "EFE0AE5FD114DE99855BC2838BE97E1D", blob(edit-v1) )', +] + +[[]] +type = 'table' +tbl_name = 'map' +sql = ''' +CREATE TABLE map ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 6, 0, 3, "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "675349A4153AEC0679BE9C0637AEEBCC" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'index' +tbl_name = 'images' + +[[]] +type = 'index' +tbl_name = 'map' + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id''' + +[[]] +type = 'view' +tbl_name = 'tiles_with_hash' +sql 
= ''' +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@flat__dif.snap b/mbtiles/tests/snapshots/mbtiles__databases@flat__dif.snap new file mode 100644 index 000000000..d5b5003ec --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@flat__dif.snap @@ -0,0 +1,48 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "B86122579EDCDD4C51F3910894FCC1A1" )', + '( "agg_tiles_hash_after_apply", "FE0D3090E8B4E89F2C755C08E8D76BEA" )', + '( "md-edit", "value - v2" )', + '( "md-new", "value - new" )', + '( "md-remove", NULL )', +] + +[[]] +type = 'table' +tbl_name = 'tiles' +sql = ''' +CREATE TABLE tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 1, 1, blob(edit-v2) )', + '( 5, 1, 2, blob(not-empty) )', + '( 5, 1, 3, blob() )', + '( 5, 2, 2, NULL )', + '( 5, 2, 3, NULL )', + '( 5, 3, 7, blob(new) )', + '( 5, 3, 8, blob(new) )', + '( 6, 1, 4, blob(edit-v2a) )', + '( 6, 2, 6, NULL )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@flat__v1-no-hash.snap b/mbtiles/tests/snapshots/mbtiles__databases@flat__v1-no-hash.snap new file mode 100644 index 000000000..d2d27f6d4 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@flat__v1-no-hash.snap @@ -0,0 +1,48 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' 
+CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles' +sql = ''' +CREATE TABLE tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 0, 0, blob(same) )', + '( 5, 0, 1, blob() )', + '( 5, 1, 1, blob(edit-v1) )', + '( 5, 1, 2, blob() )', + '( 5, 1, 3, blob(non-empty) )', + '( 5, 2, 2, blob(remove) )', + '( 5, 2, 3, blob() )', + '( 6, 0, 3, blob(same) )', + '( 6, 0, 5, blob(1-keep-1-rm) )', + '( 6, 1, 4, blob(edit-v1) )', + '( 6, 2, 6, blob(1-keep-1-rm) )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@flat__v1.snap b/mbtiles/tests/snapshots/mbtiles__databases@flat__v1.snap new file mode 100644 index 000000000..9f40369d0 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@flat__v1.snap @@ -0,0 +1,49 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "096A8399D486CF443A5DF0CEC1AD8BB2" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles' +sql = ''' +CREATE TABLE tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 0, 0, blob(same) )', + '( 5, 0, 1, blob() )', + '( 5, 1, 1, blob(edit-v1) )', + '( 5, 1, 2, blob() )', + '( 5, 1, 3, blob(non-empty) )', + '( 5, 2, 2, blob(remove) )', + '( 5, 2, 3, blob() )', + '( 6, 0, 
3, blob(same) )', + '( 6, 0, 5, blob(1-keep-1-rm) )', + '( 6, 1, 4, blob(edit-v1) )', + '( 6, 2, 6, blob(1-keep-1-rm) )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@flat__v2.snap b/mbtiles/tests/snapshots/mbtiles__databases@flat__v2.snap new file mode 100644 index 000000000..af065171f --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@flat__v2.snap @@ -0,0 +1,48 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "FE0D3090E8B4E89F2C755C08E8D76BEA" )', + '( "md-edit", "value - v2" )', + '( "md-new", "value - new" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles' +sql = ''' +CREATE TABLE tiles ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 0, 0, blob(same) )', + '( 5, 0, 1, blob() )', + '( 5, 1, 1, blob(edit-v2) )', + '( 5, 1, 2, blob(not-empty) )', + '( 5, 1, 3, blob() )', + '( 5, 3, 7, blob(new) )', + '( 5, 3, 8, blob(new) )', + '( 6, 0, 3, blob(same) )', + '( 6, 0, 5, blob(1-keep-1-rm) )', + '( 6, 1, 4, blob(edit-v2a) )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@hash__dif.snap b/mbtiles/tests/snapshots/mbtiles__databases@hash__dif.snap new file mode 100644 index 000000000..7584b75db --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@hash__dif.snap @@ -0,0 +1,56 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ 
+ '( "agg_tiles_hash", "B86122579EDCDD4C51F3910894FCC1A1" )', + '( "agg_tiles_hash_after_apply", "FE0D3090E8B4E89F2C755C08E8D76BEA" )', + '( "md-edit", "value - v2" )', + '( "md-new", "value - new" )', + '( "md-remove", NULL )', +] + +[[]] +type = 'table' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE TABLE tiles_with_hash ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 1, 1, blob(edit-v2), "FF76830FF90D79BB335884F256031731" )', + '( 5, 1, 2, blob(not-empty), "99DEE0E66806ECF1C20C09F64B2C0A34" )', + '( 5, 1, 3, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 2, 2, NULL, "" )', + '( 5, 2, 3, NULL, "" )', + '( 5, 3, 7, blob(new), "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 5, 3, 8, blob(new), "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 6, 1, 4, blob(edit-v2a), "03132BFACDB00CC63D6B7DD98D974DD5" )', + '( 6, 2, 6, NULL, "" )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles_with_hash' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@hash__v1-no-hash.snap b/mbtiles/tests/snapshots/mbtiles__databases@hash__v1-no-hash.snap new file mode 100644 index 000000000..163c7566e --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@hash__v1-no-hash.snap @@ -0,0 +1,56 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE TABLE tiles_with_hash ( + zoom_level integer NOT 
NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 0, 0, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 5, 0, 1, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 1, blob(edit-v1), "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 5, 1, 2, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 3, blob(non-empty), "720C02778717818CC0A869955BA2AFB6" )', + '( 5, 2, 2, blob(remove), "0F6969D7052DA9261E31DDB6E88C136E" )', + '( 5, 2, 3, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 6, 0, 3, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, blob(edit-v1), "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles_with_hash' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@hash__v1.snap b/mbtiles/tests/snapshots/mbtiles__databases@hash__v1.snap new file mode 100644 index 000000000..a9c1e4ea1 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@hash__v1.snap @@ -0,0 +1,57 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "096A8399D486CF443A5DF0CEC1AD8BB2" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE TABLE tiles_with_hash ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT 
NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 0, 0, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 5, 0, 1, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 1, blob(edit-v1), "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 5, 1, 2, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 3, blob(non-empty), "720C02778717818CC0A869955BA2AFB6" )', + '( 5, 2, 2, blob(remove), "0F6969D7052DA9261E31DDB6E88C136E" )', + '( 5, 2, 3, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 6, 0, 3, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, blob(edit-v1), "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles_with_hash' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@hash__v2.snap b/mbtiles/tests/snapshots/mbtiles__databases@hash__v2.snap new file mode 100644 index 000000000..abfa9ceff --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@hash__v2.snap @@ -0,0 +1,56 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "FE0D3090E8B4E89F2C755C08E8D76BEA" )', + '( "md-edit", "value - v2" )', + '( "md-new", "value - new" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'table' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE TABLE tiles_with_hash ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_data blob, + tile_hash text, + PRIMARY KEY(zoom_level, 
tile_column, tile_row))''' +values = [ + '( 5, 0, 0, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 5, 0, 1, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 1, blob(edit-v2), "FF76830FF90D79BB335884F256031731" )', + '( 5, 1, 2, blob(not-empty), "99DEE0E66806ECF1C20C09F64B2C0A34" )', + '( 5, 1, 3, blob(), "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 3, 7, blob(new), "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 5, 3, 8, blob(new), "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 6, 0, 3, blob(same), "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, blob(1-keep-1-rm), "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, blob(edit-v2a), "03132BFACDB00CC63D6B7DD98D974DD5" )', +] + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'index' +tbl_name = 'tiles_with_hash' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT zoom_level, tile_column, tile_row, tile_data FROM tiles_with_hash''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@norm__dif.snap b/mbtiles/tests/snapshots/mbtiles__databases@norm__dif.snap new file mode 100644 index 000000000..95cf04a3b --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@norm__dif.snap @@ -0,0 +1,94 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'images' +sql = ''' +CREATE TABLE images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob)''' +values = [ + '( "", NULL )', + '( "03132BFACDB00CC63D6B7DD98D974DD5", blob(edit-v2a) )', + '( "22AF645D1859CB5CA6DA0C484F1F37EA", blob(new) )', + '( "99DEE0E66806ECF1C20C09F64B2C0A34", blob(not-empty) )', + '( "D41D8CD98F00B204E9800998ECF8427E", blob() )', + '( "FF76830FF90D79BB335884F256031731", blob(edit-v2) )', +] + +[[]] +type = 'table' +tbl_name = 'map' +sql = ''' +CREATE TABLE map ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = 
[ + '( 5, 1, 1, "FF76830FF90D79BB335884F256031731" )', + '( 5, 1, 2, "99DEE0E66806ECF1C20C09F64B2C0A34" )', + '( 5, 1, 3, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 2, 2, "" )', + '( 5, 2, 3, "" )', + '( 5, 3, 7, "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 5, 3, 8, "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 6, 1, 4, "03132BFACDB00CC63D6B7DD98D974DD5" )', + '( 6, 2, 6, "" )', +] + +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "B86122579EDCDD4C51F3910894FCC1A1" )', + '( "agg_tiles_hash_after_apply", "FE0D3090E8B4E89F2C755C08E8D76BEA" )', + '( "md-edit", "value - v2" )', + '( "md-new", "value - new" )', + '( "md-remove", NULL )', +] + +[[]] +type = 'index' +tbl_name = 'images' + +[[]] +type = 'index' +tbl_name = 'map' + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id''' + +[[]] +type = 'view' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@norm__v1-no-hash.snap b/mbtiles/tests/snapshots/mbtiles__databases@norm__v1-no-hash.snap new file mode 100644 index 000000000..79da9dcff --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@norm__v1-no-hash.snap @@ -0,0 +1,94 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'images' +sql = ''' +CREATE TABLE images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob)''' +values = 
[ + '( "0F6969D7052DA9261E31DDB6E88C136E", blob(remove) )', + '( "51037A4A37730F52C8732586D3AAA316", blob(same) )', + '( "535A5575B48444EDEB926815AB26EC9B", blob(1-keep-1-rm) )', + '( "720C02778717818CC0A869955BA2AFB6", blob(non-empty) )', + '( "D41D8CD98F00B204E9800998ECF8427E", blob() )', + '( "EFE0AE5FD114DE99855BC2838BE97E1D", blob(edit-v1) )', +] + +[[]] +type = 'table' +tbl_name = 'map' +sql = ''' +CREATE TABLE map ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 0, 0, "51037A4A37730F52C8732586D3AAA316" )', + '( 5, 0, 1, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 1, "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 5, 1, 2, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 3, "720C02778717818CC0A869955BA2AFB6" )', + '( 5, 2, 2, "0F6969D7052DA9261E31DDB6E88C136E" )', + '( 5, 2, 3, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 6, 0, 3, "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'index' +tbl_name = 'images' + +[[]] +type = 'index' +tbl_name = 'map' + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id''' + +[[]] +type = 'view' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column 
AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@norm__v1.snap b/mbtiles/tests/snapshots/mbtiles__databases@norm__v1.snap new file mode 100644 index 000000000..32d7e7078 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@norm__v1.snap @@ -0,0 +1,95 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'images' +sql = ''' +CREATE TABLE images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob)''' +values = [ + '( "0F6969D7052DA9261E31DDB6E88C136E", blob(remove) )', + '( "51037A4A37730F52C8732586D3AAA316", blob(same) )', + '( "535A5575B48444EDEB926815AB26EC9B", blob(1-keep-1-rm) )', + '( "720C02778717818CC0A869955BA2AFB6", blob(non-empty) )', + '( "D41D8CD98F00B204E9800998ECF8427E", blob() )', + '( "EFE0AE5FD114DE99855BC2838BE97E1D", blob(edit-v1) )', +] + +[[]] +type = 'table' +tbl_name = 'map' +sql = ''' +CREATE TABLE map ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, tile_column, tile_row))''' +values = [ + '( 5, 0, 0, "51037A4A37730F52C8732586D3AAA316" )', + '( 5, 0, 1, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 1, "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 5, 1, 2, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 3, "720C02778717818CC0A869955BA2AFB6" )', + '( 5, 2, 2, "0F6969D7052DA9261E31DDB6E88C136E" )', + '( 5, 2, 3, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 6, 0, 3, "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, "EFE0AE5FD114DE99855BC2838BE97E1D" )', + '( 6, 2, 6, "535A5575B48444EDEB926815AB26EC9B" )', +] + +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( 
"agg_tiles_hash", "096A8399D486CF443A5DF0CEC1AD8BB2" )', + '( "md-edit", "value - v1" )', + '( "md-remove", "value - remove" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'index' +tbl_name = 'images' + +[[]] +type = 'index' +tbl_name = 'map' + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id''' + +[[]] +type = 'view' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id''' diff --git a/mbtiles/tests/snapshots/mbtiles__databases@norm__v2.snap b/mbtiles/tests/snapshots/mbtiles__databases@norm__v2.snap new file mode 100644 index 000000000..e7d9aac70 --- /dev/null +++ b/mbtiles/tests/snapshots/mbtiles__databases@norm__v2.snap @@ -0,0 +1,95 @@ +--- +source: mbtiles/tests/mbtiles.rs +expression: actual_value +--- +[[]] +type = 'table' +tbl_name = 'images' +sql = ''' +CREATE TABLE images ( + tile_id text NOT NULL PRIMARY KEY, + tile_data blob)''' +values = [ + '( "03132BFACDB00CC63D6B7DD98D974DD5", blob(edit-v2a) )', + '( "22AF645D1859CB5CA6DA0C484F1F37EA", blob(new) )', + '( "51037A4A37730F52C8732586D3AAA316", blob(same) )', + '( "535A5575B48444EDEB926815AB26EC9B", blob(1-keep-1-rm) )', + '( "99DEE0E66806ECF1C20C09F64B2C0A34", blob(not-empty) )', + '( "D41D8CD98F00B204E9800998ECF8427E", blob() )', + '( "FF76830FF90D79BB335884F256031731", blob(edit-v2) )', +] + +[[]] +type = 'table' +tbl_name = 'map' +sql = ''' +CREATE TABLE map ( + zoom_level integer NOT NULL, + tile_column integer NOT NULL, + tile_row integer NOT NULL, + tile_id text, + PRIMARY KEY(zoom_level, 
tile_column, tile_row))''' +values = [ + '( 5, 0, 0, "51037A4A37730F52C8732586D3AAA316" )', + '( 5, 0, 1, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 1, 1, "FF76830FF90D79BB335884F256031731" )', + '( 5, 1, 2, "99DEE0E66806ECF1C20C09F64B2C0A34" )', + '( 5, 1, 3, "D41D8CD98F00B204E9800998ECF8427E" )', + '( 5, 3, 7, "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 5, 3, 8, "22AF645D1859CB5CA6DA0C484F1F37EA" )', + '( 6, 0, 3, "51037A4A37730F52C8732586D3AAA316" )', + '( 6, 0, 5, "535A5575B48444EDEB926815AB26EC9B" )', + '( 6, 1, 4, "03132BFACDB00CC63D6B7DD98D974DD5" )', +] + +[[]] +type = 'table' +tbl_name = 'metadata' +sql = ''' +CREATE TABLE metadata ( + name text NOT NULL PRIMARY KEY, + value text)''' +values = [ + '( "agg_tiles_hash", "FE0D3090E8B4E89F2C755C08E8D76BEA" )', + '( "md-edit", "value - v2" )', + '( "md-new", "value - new" )', + '( "md-same", "value - same" )', +] + +[[]] +type = 'index' +tbl_name = 'images' + +[[]] +type = 'index' +tbl_name = 'map' + +[[]] +type = 'index' +tbl_name = 'metadata' + +[[]] +type = 'view' +tbl_name = 'tiles' +sql = ''' +CREATE VIEW tiles AS + SELECT map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data + FROM map + JOIN images ON images.tile_id = map.tile_id''' + +[[]] +type = 'view' +tbl_name = 'tiles_with_hash' +sql = ''' +CREATE VIEW tiles_with_hash AS + SELECT + map.zoom_level AS zoom_level, + map.tile_column AS tile_column, + map.tile_row AS tile_row, + images.tile_data AS tile_data, + images.tile_id AS tile_hash + FROM map + JOIN images ON images.tile_id = map.tile_id''' diff --git a/multi-platform.Dockerfile b/multi-platform.Dockerfile index e1fe2f4ee..b7420a3da 100644 --- a/multi-platform.Dockerfile +++ b/multi-platform.Dockerfile @@ -1,4 +1,4 @@ -FROM ubuntu:latest +FROM alpine ARG TARGETPLATFORM LABEL org.opencontainers.image.description="Blazing fast and lightweight tile server with PostGIS, MBTiles, and PMTiles support" diff --git 
a/tests/config.yaml b/tests/config.yaml index 1f9dc1c66..cecc48aa7 100644 --- a/tests/config.yaml +++ b/tests/config.yaml @@ -162,9 +162,13 @@ postgres: pmtiles: sources: - pmt: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles + pmt: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles sprites: paths: tests/fixtures/sprites/src1 sources: mysrc: tests/fixtures/sprites/src2 + +fonts: + - tests/fixtures/fonts/overpass-mono-regular.ttf + - tests/fixtures/fonts diff --git a/tests/expected/auto/catalog_auto.json b/tests/expected/auto/catalog_auto.json index 1de39eb3b..fd502f77f 100644 --- a/tests/expected/auto/catalog_auto.json +++ b/tests/expected/auto/catalog_auto.json @@ -162,5 +162,30 @@ "name": "Major cities from Natural Earth data", "description": "Major cities from Natural Earth data" } + }, + "sprites": { + "src1": { + "images": [ + "another_bicycle", + "bear", + "sub/circle" + ] + } + }, + "fonts": { + "Overpass Mono Light": { + "family": "Overpass Mono", + "style": "Light", + "glyphs": 931, + "start": 0, + "end": 64258 + }, + "Overpass Mono Regular": { + "family": "Overpass Mono", + "style": "Regular", + "glyphs": 931, + "start": 0, + "end": 64258 + } } } diff --git a/tests/expected/auto/cmp.json b/tests/expected/auto/cmp.json index 234b2bc3d..161878bc1 100644 --- a/tests/expected/auto/cmp.json +++ b/tests/expected/auto/cmp.json @@ -23,6 +23,12 @@ } } ], + "bounds": [ + -179.27313970132585, + -80.46177157848345, + 179.11187181086706, + 84.93092095128937 + ], "description": "public.points1.geom\npublic.points2.geom", "name": "table_source,points1,points2" } diff --git a/tests/expected/auto/mb_jpg_0_0_0.png b/tests/expected/auto/mb_jpg_0_0_0.jpg similarity index 100% rename from tests/expected/auto/mb_jpg_0_0_0.png rename to tests/expected/auto/mb_jpg_0_0_0.jpg diff --git a/tests/expected/auto/mb_jpg_0_0_0.png.txt b/tests/expected/auto/mb_jpg_0_0_0.jpg.txt similarity index 60% rename from tests/expected/auto/mb_jpg_0_0_0.png.txt rename 
to tests/expected/auto/mb_jpg_0_0_0.jpg.txt index 9bdac2a77..daa75c631 100644 --- a/tests/expected/auto/mb_jpg_0_0_0.png.txt +++ b/tests/expected/auto/mb_jpg_0_0_0.jpg.txt @@ -1 +1 @@ -tests/output/auto/mb_jpg_0_0_0.png: JPEG image data, JFIF standard 1.01, aspect ratio, density 1x1, segment length 16, baseline, precision 8, 256x256, components 3 +tests/output/auto/mb_jpg_0_0_0.jpg: JPEG image data, JFIF standard 1.01, aspect ratio, density 1x1, segment length 16, baseline, precision 8, 256x256, components 3 diff --git a/tests/expected/auto/points3857_srid.json b/tests/expected/auto/points3857_srid.json index 195123bc5..4b90db678 100644 --- a/tests/expected/auto/points3857_srid.json +++ b/tests/expected/auto/points3857_srid.json @@ -11,6 +11,12 @@ } } ], + "bounds": [ + -161.40590777554058, + -81.50727021609012, + 172.51549126768532, + 84.2440187164111 + ], "description": "public.points3857.geom", "name": "points3857" } diff --git a/tests/expected/auto/table_source.json b/tests/expected/auto/table_source.json index 588e57b37..609c974b5 100644 --- a/tests/expected/auto/table_source.json +++ b/tests/expected/auto/table_source.json @@ -11,6 +11,12 @@ } } ], + "bounds": [ + -2, + -1, + 142.84131509869133, + 45 + ], "name": "table_source", "foo": { "bar": "foo" diff --git a/tests/expected/configured/catalog_cfg.json b/tests/expected/configured/catalog_cfg.json index 03372aca3..810687e76 100644 --- a/tests/expected/configured/catalog_cfg.json +++ b/tests/expected/configured/catalog_cfg.json @@ -39,5 +39,35 @@ "content_type": "application/x-protobuf", "description": "public.table_source.geom" } + }, + "sprites": { + "mysrc": { + "images": [ + "bicycle" + ] + }, + "src1": { + "images": [ + "another_bicycle", + "bear", + "sub/circle" + ] + } + }, + "fonts": { + "Overpass Mono Light": { + "family": "Overpass Mono", + "style": "Light", + "glyphs": 931, + "start": 0, + "end": 64258 + }, + "Overpass Mono Regular": { + "family": "Overpass Mono", + "style": "Regular", + "glyphs": 
931, + "start": 0, + "end": 64258 + } } } diff --git a/tests/expected/configured/font_1.pbf b/tests/expected/configured/font_1.pbf new file mode 100644 index 000000000..bb3447323 Binary files /dev/null and b/tests/expected/configured/font_1.pbf differ diff --git a/tests/expected/configured/font_2.pbf b/tests/expected/configured/font_2.pbf new file mode 100644 index 000000000..f57bcc48c Binary files /dev/null and b/tests/expected/configured/font_2.pbf differ diff --git a/tests/expected/configured/font_3.pbf b/tests/expected/configured/font_3.pbf new file mode 100644 index 000000000..f57bcc48c Binary files /dev/null and b/tests/expected/configured/font_3.pbf differ diff --git a/tests/expected/generated_config.yaml b/tests/expected/generated_config.yaml index c51f1f02f..435d52f41 100644 --- a/tests/expected/generated_config.yaml +++ b/tests/expected/generated_config.yaml @@ -1,7 +1,7 @@ listen_addresses: localhost:3111 postgres: default_srid: 900913 - disable_bounds: true + auto_bounds: calc auto_publish: true tables: MixPoints: @@ -9,6 +9,11 @@ postgres: table: MixPoints srid: 4326 geometry_column: Geom + bounds: + - -170.94984639004662 + - -84.20025580733805 + - 167.70892858284475 + - 74.23573284753762 geometry_type: POINT properties: Gid: int4 @@ -18,6 +23,11 @@ postgres: table: auto_table srid: 4326 geometry_column: geom + bounds: + - -166.87107126230424 + - -53.44747249115674 + - 168.14061220360549 + - 84.22411861475385 geometry_type: POINT properties: feat_id: int4 @@ -27,6 +37,11 @@ postgres: table: bigint_table srid: 4326 geometry_column: geom + bounds: + - -174.89475564568033 + - -77.2579745396886 + - 174.72753224514435 + - 73.80785950599903 geometry_type: POINT properties: big_feat_id: int8 @@ -36,6 +51,11 @@ postgres: table: points1 srid: 4326 geometry_column: geom + bounds: + - -179.27313970132585 + - -67.52518563265659 + - 162.60117193735186 + - 84.93092095128937 geometry_type: POINT properties: gid: int4 @@ -44,6 +64,11 @@ postgres: table: points1_vw 
srid: 4326 geometry_column: geom + bounds: + - -179.27313970132585 + - -67.52518563265659 + - 162.60117193735186 + - 84.93092095128937 geometry_type: POINT properties: gid: int4 @@ -52,6 +77,11 @@ postgres: table: points2 srid: 4326 geometry_column: geom + bounds: + - -174.050750735362 + - -80.46177157848345 + - 179.11187181086706 + - 81.13068764165727 geometry_type: POINT properties: gid: int4 @@ -60,6 +90,11 @@ postgres: table: points3857 srid: 3857 geometry_column: geom + bounds: + - -161.40590777554058 + - -81.50727021609012 + - 172.51549126768532 + - 84.2440187164111 geometry_type: POINT properties: gid: int4 @@ -68,6 +103,11 @@ postgres: table: points_empty_srid srid: 900913 geometry_column: geom + bounds: + - -162.35196679784573 + - -84.49919770031491 + - 178.47294677445652 + - 82.7000012450467 geometry_type: GEOMETRY properties: gid: int4 @@ -76,6 +116,11 @@ postgres: table: table_source srid: 4326 geometry_column: geom + bounds: + - -2.0 + - -1.0 + - 142.84131509869133 + - 45.0 geometry_type: GEOMETRY properties: gid: int4 @@ -84,6 +129,11 @@ postgres: table: table_source_multiple_geom srid: 4326 geometry_column: geom1 + bounds: + - -136.62076049706184 + - -78.3350299285405 + - 176.56297743499888 + - 75.78731065954437 geometry_type: POINT properties: gid: int4 @@ -92,6 +142,11 @@ postgres: table: table_source_multiple_geom srid: 4326 geometry_column: geom2 + bounds: + - -136.62076049706184 + - -78.3350299285405 + - 176.56297743499888 + - 75.78731065954437 geometry_type: POINT properties: gid: int4 @@ -133,23 +188,31 @@ postgres: schema: public function: function_zxy_row_key pmtiles: - paths: tests/fixtures/files + paths: + - tests/fixtures/mbtiles + - tests/fixtures/pmtiles sources: - png: tests/fixtures/files/png.pmtiles - stamen_toner__raster_CC-BY-ODbL_z3: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles - webp2: tests/fixtures/files/webp2.pmtiles + png: tests/fixtures/pmtiles/png.pmtiles + stamen_toner__raster_CC-BY-ODbL_z3: 
tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles + webp2: tests/fixtures/pmtiles/webp2.pmtiles mbtiles: - paths: tests/fixtures/files + paths: + - tests/fixtures/mbtiles + - tests/fixtures/pmtiles sources: - geography-class-jpg: tests/fixtures/files/geography-class-jpg.mbtiles - geography-class-jpg-diff: tests/fixtures/files/geography-class-jpg-diff.mbtiles - geography-class-jpg-modified: tests/fixtures/files/geography-class-jpg-modified.mbtiles - geography-class-png: tests/fixtures/files/geography-class-png.mbtiles - geography-class-png-no-bounds: tests/fixtures/files/geography-class-png-no-bounds.mbtiles - json: tests/fixtures/files/json.mbtiles - uncompressed_mvt: tests/fixtures/files/uncompressed_mvt.mbtiles - webp: tests/fixtures/files/webp.mbtiles - world_cities: tests/fixtures/files/world_cities.mbtiles - world_cities_diff: tests/fixtures/files/world_cities_diff.mbtiles - world_cities_modified: tests/fixtures/files/world_cities_modified.mbtiles - zoomed_world_cities: tests/fixtures/files/zoomed_world_cities.mbtiles + geography-class-jpg: tests/fixtures/mbtiles/geography-class-jpg.mbtiles + geography-class-jpg-diff: tests/fixtures/mbtiles/geography-class-jpg-diff.mbtiles + geography-class-jpg-modified: tests/fixtures/mbtiles/geography-class-jpg-modified.mbtiles + geography-class-png: tests/fixtures/mbtiles/geography-class-png.mbtiles + geography-class-png-no-bounds: tests/fixtures/mbtiles/geography-class-png-no-bounds.mbtiles + json: tests/fixtures/mbtiles/json.mbtiles + uncompressed_mvt: tests/fixtures/mbtiles/uncompressed_mvt.mbtiles + webp: tests/fixtures/mbtiles/webp.mbtiles + world_cities: tests/fixtures/mbtiles/world_cities.mbtiles + world_cities_diff: tests/fixtures/mbtiles/world_cities_diff.mbtiles + world_cities_modified: tests/fixtures/mbtiles/world_cities_modified.mbtiles + zoomed_world_cities: tests/fixtures/mbtiles/zoomed_world_cities.mbtiles +sprites: tests/fixtures/sprites/src1 +fonts: +- 
tests/fixtures/fonts/overpass-mono-regular.ttf +- tests/fixtures/fonts diff --git a/tests/expected/given_config.yaml b/tests/expected/given_config.yaml index 033c66283..64291515e 100644 --- a/tests/expected/given_config.yaml +++ b/tests/expected/given_config.yaml @@ -159,8 +159,11 @@ postgres: - 90.0 pmtiles: sources: - pmt: tests/fixtures/files/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles + pmt: tests/fixtures/pmtiles/stamen_toner__raster_CC-BY+ODbL_z3.pmtiles sprites: paths: tests/fixtures/sprites/src1 sources: mysrc: tests/fixtures/sprites/src2 +fonts: +- tests/fixtures/fonts/overpass-mono-regular.ttf +- tests/fixtures/fonts diff --git a/tests/expected/mbtiles/copy_diff.txt b/tests/expected/mbtiles/copy_diff.txt index e69de29bb..4a0e287bc 100644 --- a/tests/expected/mbtiles/copy_diff.txt +++ b/tests/expected/mbtiles/copy_diff.txt @@ -0,0 +1,2 @@ +[INFO ] Comparing ./tests/fixtures/mbtiles/world_cities.mbtiles (flat) and ./tests/fixtures/mbtiles/world_cities_modified.mbtiles (flat) into a new file tests/temp/world_cities_diff.mbtiles (flat) +[INFO ] Adding a new metadata value agg_tiles_hash = C7E2E5A9BA04693994DB1F57D1DF5646 in tests/temp/world_cities_diff.mbtiles diff --git a/tests/expected/mbtiles/copy_diff2.txt b/tests/expected/mbtiles/copy_diff2.txt index e69de29bb..e03d7f71a 100644 --- a/tests/expected/mbtiles/copy_diff2.txt +++ b/tests/expected/mbtiles/copy_diff2.txt @@ -0,0 +1,2 @@ +[INFO ] Comparing ./tests/fixtures/mbtiles/world_cities_modified.mbtiles (flat) and tests/temp/world_cities_copy.mbtiles (flat) into a new file tests/temp/world_cities_diff_modified.mbtiles (flat) +[INFO ] Adding a new metadata value agg_tiles_hash = D41D8CD98F00B204E9800998ECF8427E in tests/temp/world_cities_diff_modified.mbtiles diff --git a/tests/expected/mbtiles/help.txt b/tests/expected/mbtiles/help.txt index a7f0896af..55eb51149 100644 --- a/tests/expected/mbtiles/help.txt +++ b/tests/expected/mbtiles/help.txt @@ -3,13 +3,13 @@ A utility to work with .mbtiles file content 
Usage: mbtiles Commands: - meta-all Prints all values in the metadata table in a free-style, unstable YAML format - meta-get Gets a single value from the MBTiles metadata table - meta-set Sets a single value in the MBTiles' file metadata table or deletes it if no value - copy Copy tiles from one mbtiles file to another - apply-diff Apply diff file generated from 'copy' command - validate Validate tile data if hash of tile data exists in file - help Print this message or the help of the given subcommand(s) + meta-all Prints all values in the metadata table in a free-style, unstable YAML format + meta-get Gets a single value from the MBTiles metadata table + meta-set Sets a single value in the MBTiles' file metadata table or deletes it if no value + copy Copy tiles from one mbtiles file to another + apply-patch Apply diff file generated from 'copy' command + validate Validate tile data if hash of tile data exists in file + help Print this message or the help of the given subcommand(s) Options: -h, --help Print help diff --git a/tests/expected/mbtiles/validate-bad.txt b/tests/expected/mbtiles/validate-bad.txt new file mode 100644 index 000000000..242a6aaaa --- /dev/null +++ b/tests/expected/mbtiles/validate-bad.txt @@ -0,0 +1,3 @@ +[INFO ] Quick integrity check passed for ./tests/fixtures/files/bad_hash.mbtiles +[INFO ] All tile hashes are valid for ./tests/fixtures/files/bad_hash.mbtiles +[ERROR] Computed aggregate tiles hash D4E1030D57751A0B45A28A71267E46B8 does not match tile data in metadata CAFEC0DEDEADBEEFDEADBEEFDEADBEEF for MBTile file ./tests/fixtures/files/bad_hash.mbtiles diff --git a/tests/expected/mbtiles/validate-fix.txt b/tests/expected/mbtiles/validate-fix.txt new file mode 100644 index 000000000..ded275f11 --- /dev/null +++ b/tests/expected/mbtiles/validate-fix.txt @@ -0,0 +1,3 @@ +[INFO ] Quick integrity check passed for tests/temp/fix_bad_hash.mbtiles +[INFO ] All tile hashes are valid for tests/temp/fix_bad_hash.mbtiles +[INFO ] Updating 
agg_tiles_hash from CAFEC0DEDEADBEEFDEADBEEFDEADBEEF to D4E1030D57751A0B45A28A71267E46B8 in tests/temp/fix_bad_hash.mbtiles diff --git a/tests/expected/mbtiles/validate-fix2.txt b/tests/expected/mbtiles/validate-fix2.txt new file mode 100644 index 000000000..250ba58e6 --- /dev/null +++ b/tests/expected/mbtiles/validate-fix2.txt @@ -0,0 +1,3 @@ +[INFO ] Quick integrity check passed for tests/temp/fix_bad_hash.mbtiles +[INFO ] All tile hashes are valid for tests/temp/fix_bad_hash.mbtiles +[INFO ] The agg_tiles_hashes=D4E1030D57751A0B45A28A71267E46B8 has been verified for tests/temp/fix_bad_hash.mbtiles diff --git a/tests/expected/mbtiles/validate-ok.txt b/tests/expected/mbtiles/validate-ok.txt new file mode 100644 index 000000000..87a15d314 --- /dev/null +++ b/tests/expected/mbtiles/validate-ok.txt @@ -0,0 +1,3 @@ +[INFO ] Quick integrity check passed for ./tests/fixtures/mbtiles/zoomed_world_cities.mbtiles +[INFO ] All tile hashes are valid for ./tests/fixtures/mbtiles/zoomed_world_cities.mbtiles +[INFO ] The agg_tiles_hashes=D4E1030D57751A0B45A28A71267E46B8 has been verified for ./tests/fixtures/mbtiles/zoomed_world_cities.mbtiles diff --git a/tests/fixtures/files/bad_hash.mbtiles b/tests/fixtures/files/bad_hash.mbtiles new file mode 100644 index 000000000..33943e699 Binary files /dev/null and b/tests/fixtures/files/bad_hash.mbtiles differ diff --git a/tests/fixtures/files/invalid/invalid-tile-format.mbtiles b/tests/fixtures/files/invalid-tile-format.mbtiles similarity index 100% rename from tests/fixtures/files/invalid/invalid-tile-format.mbtiles rename to tests/fixtures/files/invalid-tile-format.mbtiles diff --git a/tests/fixtures/files/invalid/invalid.mbtiles b/tests/fixtures/files/invalid.mbtiles similarity index 100% rename from tests/fixtures/files/invalid/invalid.mbtiles rename to tests/fixtures/files/invalid.mbtiles diff --git a/tests/fixtures/files/invalid/invalid_zoomed_world_cities.mbtiles b/tests/fixtures/files/invalid_zoomed_world_cities.mbtiles 
similarity index 100% rename from tests/fixtures/files/invalid/invalid_zoomed_world_cities.mbtiles rename to tests/fixtures/files/invalid_zoomed_world_cities.mbtiles diff --git a/tests/fixtures/fonts/overpass-mono-regular.ttf b/tests/fixtures/fonts/overpass-mono-regular.ttf new file mode 100755 index 000000000..107fe320d Binary files /dev/null and b/tests/fixtures/fonts/overpass-mono-regular.ttf differ diff --git a/tests/fixtures/fonts/sub_dir/overpass-mono-light.otf b/tests/fixtures/fonts/sub_dir/overpass-mono-light.otf new file mode 100755 index 000000000..64e2932a5 Binary files /dev/null and b/tests/fixtures/fonts/sub_dir/overpass-mono-light.otf differ diff --git a/tests/fixtures/initdb-dc-ssl-cert.sh b/tests/fixtures/initdb-dc-ssl-cert.sh new file mode 100755 index 000000000..84ba461b9 --- /dev/null +++ b/tests/fixtures/initdb-dc-ssl-cert.sh @@ -0,0 +1,18 @@ +#!/usr/bin/env sh +set -e + +mv /var/lib/postgresql/data/pg_hba.conf /var/lib/postgresql/data/pg_hba.conf.bak +cat > /var/lib/postgresql/data/pg_hba.conf < /var/lib/postgresql/data/pg_hba.conf <>(), vec!["MixPoints"],); -} diff --git a/tests/test.sh b/tests/test.sh index 846e1a3de..9e76449d8 100755 --- a/tests/test.sh +++ b/tests/test.sh @@ -4,58 +4,66 @@ set -euo pipefail # TODO: use --fail-with-body to get the response body on failure CURL=${CURL:-curl --silent --show-error --fail --compressed} DATABASE_URL="${DATABASE_URL:-postgres://postgres@localhost/db}" -MARTIN_BUILD="${MARTIN_BUILD:-cargo build --features ssl}" +MARTIN_BUILD="${MARTIN_BUILD:-cargo build}" MARTIN_PORT="${MARTIN_PORT:-3111}" MARTIN_URL="http://localhost:${MARTIN_PORT}" MARTIN_ARGS="${MARTIN_ARGS:---listen-addresses localhost:${MARTIN_PORT}}" -MARTIN_BIN="${MARTIN_BIN:-cargo run --features ssl --} ${MARTIN_ARGS}" +MARTIN_BIN="${MARTIN_BIN:-cargo run --} ${MARTIN_ARGS}" -MBTILES_BUILD="${MBTILES_BUILD:-cargo build -p martin-mbtiles}" +MBTILES_BUILD="${MBTILES_BUILD:-cargo build -p mbtiles}" 
MBTILES_BIN="${MBTILES_BIN:-target/debug/mbtiles}" -function wait_for_martin { +LOG_DIR="${LOG_DIR:-target/test_logs}" +mkdir -p "$LOG_DIR" + +function wait_for { # Seems the --retry-all-errors option is not available on older curl versions, but maybe in the future we can just use this: # timeout -k 20s 20s curl --retry 10 --retry-all-errors --retry-delay 1 -sS "$MARTIN_URL/health" PROCESS_ID=$1 - echo "Waiting for Martin ($PROCESS_ID) to start by checking $MARTIN_URL/health to be valid..." + PROC_NAME=$2 + TEST_URL=$3 + echo "Waiting for $PROC_NAME ($PROCESS_ID) to start by checking $TEST_URL to be valid..." for i in {1..60}; do - if $CURL "$MARTIN_URL/health" 2>/dev/null >/dev/null; then - echo "Martin is up!" - $CURL "$MARTIN_URL/health" + if $CURL "$TEST_URL" 2>/dev/null >/dev/null; then + echo "$PROC_NAME is up!" + if [[ "$PROC_NAME" == "Martin" ]]; then + $CURL "$TEST_URL" + fi return fi if ps -p $PROCESS_ID > /dev/null ; then - echo "Martin is not up yet, waiting for $MARTIN_URL/health ..." + echo "$PROC_NAME is not up yet, waiting for $TEST_URL ..." sleep 1 else - echo "Martin died!" + echo "$PROC_NAME died!" ps au - lsof -i || true + lsof -i || true; exit 1 fi done - echo "Martin did not start in time" + echo "$PROC_NAME did not start in time" ps au - lsof -i || true + lsof -i || true; exit 1 } function kill_process { PROCESS_ID=$1 - echo "Waiting for Martin ($PROCESS_ID) to stop..." + PROC_NAME=$2 + echo "Waiting for $PROC_NAME ($PROCESS_ID) to stop..." 
kill $PROCESS_ID for i in {1..50}; do if ps -p $PROCESS_ID > /dev/null ; then sleep 0.1 else - echo "Martin ($PROCESS_ID) has stopped" + echo "$PROC_NAME ($PROCESS_ID) has stopped" return fi done - echo "Martin did not stop in time, killing it" + echo "$PROC_NAME did not stop in time, killing it" kill -9 $PROCESS_ID # wait for it to die using timeout and wait - timeout -k 1s 1s wait $PROCESS_ID || true + timeout -k 1s 1s wait $PROCESS_ID || true; } test_jsn() @@ -83,7 +91,8 @@ test_pbf() test_png() { - FILENAME="$TEST_OUT_DIR/$1.png" + # 3rd argument is optional, .png by default + FILENAME="$TEST_OUT_DIR/$1.${3:-png}" URL="$MARTIN_URL/$2" echo "Testing $(basename "$FILENAME") from $URL" @@ -94,6 +103,21 @@ test_png() fi } +test_jpg() +{ + test_png $1 $2 jpg +} + + +test_font() +{ + FILENAME="$TEST_OUT_DIR/$1.pbf" + URL="$MARTIN_URL/$2" + + echo "Testing $(basename "$FILENAME") from $URL" + $CURL "$URL" > "$FILENAME" +} + # Delete a line from a file $1 that matches parameter $2 remove_line() { @@ -124,6 +148,7 @@ validate_log() # Make sure the log has just the expected warnings, remove them, and test that there are no other ones test_log_has_str "$LOG_FILE" 'WARN martin::pg::table_source] Table public.table_source has no spatial index on column geom' + test_log_has_str "$LOG_FILE" 'WARN martin::fonts] Ignoring duplicate font Overpass Mono Regular from tests' echo "Checking for no other warnings or errors in the log" if grep -e ' ERROR ' -e ' WARN ' "$LOG_FILE"; then @@ -150,14 +175,14 @@ echo "Test auto configured Martin" TEST_OUT_DIR="$(dirname "$0")/output/auto" mkdir -p "$TEST_OUT_DIR" -ARG=(--default-srid 900913 --disable-bounds --save-config "$(dirname "$0")/output/generated_config.yaml" tests/fixtures/files) +ARG=(--default-srid 900913 --auto-bounds calc --save-config "$(dirname "$0")/output/generated_config.yaml" tests/fixtures/mbtiles tests/fixtures/pmtiles --sprite tests/fixtures/sprites/src1 --font tests/fixtures/fonts/overpass-mono-regular.ttf --font 
tests/fixtures/fonts) set -x -$MARTIN_BIN "${ARG[@]}" 2>&1 | tee test_log_1.txt & -PROCESS_ID=`jobs -p` +$MARTIN_BIN "${ARG[@]}" 2>&1 | tee "${LOG_DIR}/test_log_1.txt" & +MARTIN_PROC_ID=`jobs -p | tail -n 1` { set +x; } 2> /dev/null -trap "kill -9 $PROCESS_ID 2> /dev/null || true" EXIT -wait_for_martin $PROCESS_ID +trap "echo 'Stopping Martin server $MARTIN_PROC_ID...'; kill -9 $MARTIN_PROC_ID 2> /dev/null || true; echo 'Stopped Martin server $MARTIN_PROC_ID';" EXIT HUP INT TERM +wait_for $MARTIN_PROC_ID Martin "$MARTIN_URL/health" >&2 echo "Test catalog" test_jsn catalog_auto catalog @@ -217,7 +242,7 @@ test_png pmt_3_4_2 stamen_toner__raster_CC-BY-ODbL_z3/3/4/2 >&2 echo "***** Test server response for MbTiles source *****" test_jsn mb_jpg geography-class-jpg -test_png mb_jpg_0_0_0 geography-class-jpg/0/0/0 +test_jpg mb_jpg_0_0_0 geography-class-jpg/0/0/0 test_jsn mb_png geography-class-png test_png mb_png_0_0_0 geography-class-png/0/0/0 test_jsn mb_mvt world_cities @@ -226,8 +251,8 @@ test_pbf mb_mvt_2_3_1 world_cities/2/3/1 >&2 echo "***** Test server response for table source with empty SRID *****" test_pbf points_empty_srid_0_0_0 points_empty_srid/0/0/0 -kill_process $PROCESS_ID -validate_log test_log_1.txt +kill_process $MARTIN_PROC_ID Martin +validate_log "${LOG_DIR}/test_log_1.txt" echo "------------------------------------------------------------------------------------------------------------------------" @@ -237,11 +262,11 @@ mkdir -p "$TEST_OUT_DIR" ARG=(--config tests/config.yaml --max-feature-count 1000 --save-config "$(dirname "$0")/output/given_config.yaml" -W 1) set -x -$MARTIN_BIN "${ARG[@]}" 2>&1 | tee test_log_2.txt & -PROCESS_ID=`jobs -p` +$MARTIN_BIN "${ARG[@]}" 2>&1 | tee "${LOG_DIR}/test_log_2.txt" & +MARTIN_PROC_ID=`jobs -p | tail -n 1` { set +x; } 2> /dev/null -trap "kill -9 $PROCESS_ID 2> /dev/null || true" EXIT -wait_for_martin $PROCESS_ID +trap "echo 'Stopping Martin server $MARTIN_PROC_ID...'; kill -9 $MARTIN_PROC_ID 2> /dev/null || 
true; echo 'Stopped Martin server $MARTIN_PROC_ID';" EXIT HUP INT TERM +wait_for $MARTIN_PROC_ID Martin "$MARTIN_URL/health" >&2 echo "Test catalog" test_jsn catalog_cfg catalog @@ -265,13 +290,16 @@ test_png spr_cmp sprite/src1,mysrc.png test_jsn spr_cmp_2x sprite/src1,mysrc@2x.json test_png spr_cmp_2x sprite/src1,mysrc@2x.png -kill_process $PROCESS_ID -validate_log test_log_2.txt +test_font font_1 font/Overpass%20Mono%20Light/0-255 +test_font font_2 font/Overpass%20Mono%20Regular/0-255 +test_font font_3 font/Overpass%20Mono%20Regular,Overpass%20Mono%20Light/0-255 + +kill_process $MARTIN_PROC_ID Martin +validate_log "${LOG_DIR}/test_log_2.txt" remove_line "$(dirname "$0")/output/given_config.yaml" " connection_string: " remove_line "$(dirname "$0")/output/generated_config.yaml" " connection_string: " - echo "------------------------------------------------------------------------------------------------------------------------" echo "Test mbtiles utility" if [[ "$MBTILES_BIN" != "-" ]]; then @@ -289,21 +317,34 @@ if [[ "$MBTILES_BIN" != "-" ]]; then $MBTILES_BIN --help 2>&1 | tee "$TEST_OUT_DIR/help.txt" $MBTILES_BIN meta-all --help 2>&1 | tee "$TEST_OUT_DIR/meta-all_help.txt" - $MBTILES_BIN meta-all ./tests/fixtures/files/world_cities.mbtiles 2>&1 | tee "$TEST_OUT_DIR/meta-all.txt" + $MBTILES_BIN meta-all ./tests/fixtures/mbtiles/world_cities.mbtiles 2>&1 | tee "$TEST_OUT_DIR/meta-all.txt" $MBTILES_BIN meta-get --help 2>&1 | tee "$TEST_OUT_DIR/meta-get_help.txt" - $MBTILES_BIN meta-get ./tests/fixtures/files/world_cities.mbtiles name 2>&1 | tee "$TEST_OUT_DIR/meta-get_name.txt" - $MBTILES_BIN meta-get ./tests/fixtures/files/world_cities.mbtiles missing_value 2>&1 | tee "$TEST_OUT_DIR/meta-get_missing_value.txt" + $MBTILES_BIN meta-get ./tests/fixtures/mbtiles/world_cities.mbtiles name 2>&1 | tee "$TEST_OUT_DIR/meta-get_name.txt" + $MBTILES_BIN meta-get ./tests/fixtures/mbtiles/world_cities.mbtiles missing_value 2>&1 | tee 
"$TEST_OUT_DIR/meta-get_missing_value.txt" + $MBTILES_BIN validate ./tests/fixtures/mbtiles/zoomed_world_cities.mbtiles 2>&1 | tee "$TEST_OUT_DIR/validate-ok.txt" + + set +e + $MBTILES_BIN validate ./tests/fixtures/files/bad_hash.mbtiles 2>&1 | tee "$TEST_OUT_DIR/validate-bad.txt" + if [[ $? -eq 0 ]]; then + echo "ERROR: validate with bad_hash should have failed" + exit 1 + fi + set -e + + cp ./tests/fixtures/files/bad_hash.mbtiles "$TEST_TEMP_DIR/fix_bad_hash.mbtiles" + $MBTILES_BIN validate --update-agg-tiles-hash "$TEST_TEMP_DIR/fix_bad_hash.mbtiles" 2>&1 | tee "$TEST_OUT_DIR/validate-fix.txt" + $MBTILES_BIN validate "$TEST_TEMP_DIR/fix_bad_hash.mbtiles" 2>&1 | tee "$TEST_OUT_DIR/validate-fix2.txt" # Create diff file $MBTILES_BIN copy \ - ./tests/fixtures/files/world_cities.mbtiles \ + ./tests/fixtures/mbtiles/world_cities.mbtiles \ "$TEST_TEMP_DIR/world_cities_diff.mbtiles" \ - --diff-with-file ./tests/fixtures/files/world_cities_modified.mbtiles \ + --diff-with-file ./tests/fixtures/mbtiles/world_cities_modified.mbtiles \ 2>&1 | tee "$TEST_OUT_DIR/copy_diff.txt" if command -v sqlite3 > /dev/null; then # Apply this diff to the original version of the file - cp ./tests/fixtures/files/world_cities.mbtiles "$TEST_TEMP_DIR/world_cities_copy.mbtiles" + cp ./tests/fixtures/mbtiles/world_cities.mbtiles "$TEST_TEMP_DIR/world_cities_copy.mbtiles" sqlite3 "$TEST_TEMP_DIR/world_cities_copy.mbtiles" \ -bail \ @@ -315,7 +356,7 @@ if [[ "$MBTILES_BIN" != "-" ]]; then # Ensure that applying the diff resulted in the modified version of the file $MBTILES_BIN copy \ --diff-with-file "$TEST_TEMP_DIR/world_cities_copy.mbtiles" \ - ./tests/fixtures/files/world_cities_modified.mbtiles \ + ./tests/fixtures/mbtiles/world_cities_modified.mbtiles \ "$TEST_TEMP_DIR/world_cities_diff_modified.mbtiles" \ 2>&1 | tee "$TEST_OUT_DIR/copy_diff2.txt"