From ac1e7b9354329c4d0ddd4012bbacdbae98386619 Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Thu, 23 Apr 2026 12:39:01 -0400 Subject: [PATCH 1/7] ENH: Add ExternalDataUpload skill for IPFS content-link workflow Adds Utilities/Maintenance/ExternalDataUpload/ with a Claude Code skill that uploads test data to IPFS under the UnixFS v1 2025 profile, pins on the redundant itk-pinata and itk-filebase remote services, optionally mirrors bytes into an ITKTestingData clone at CID/ (with a 50 MB guard for GitHub's per-file push limit), maintains a new Testing/Data/content-links.manifest index, batch-pins every manifest CID, and normalizes existing .md5 / .sha256 / .cid links by fetching through the gateway templates parsed directly from CMake/ITKExternalData.cmake and re-uploading under the current UnixFS profile. Documents the one-time Kubo + IPFS Desktop setup and references the skill from Testing/Data/README.md. --- Testing/Data/README.md | 21 + Testing/Data/content-links.manifest | 14 + .../Maintenance/ExternalDataUpload/README.md | 313 +++++++++++++ .../Maintenance/ExternalDataUpload/SKILL.md | 126 ++++++ .../content-link-normalize.sh | 416 ++++++++++++++++++ .../ExternalDataUpload/ipfs-pin-all.sh | 176 ++++++++ .../ExternalDataUpload/ipfs-upload.sh | 308 +++++++++++++ 7 files changed, 1374 insertions(+) create mode 100644 Testing/Data/content-links.manifest create mode 100644 Utilities/Maintenance/ExternalDataUpload/README.md create mode 100644 Utilities/Maintenance/ExternalDataUpload/SKILL.md create mode 100755 Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh create mode 100755 Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh create mode 100755 Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh diff --git a/Testing/Data/README.md b/Testing/Data/README.md index 41c19ec53ec..275e0dff962 100644 --- a/Testing/Data/README.md +++ b/Testing/Data/README.md @@ -7,3 +7,24 @@ tests and hence ensure the health of the toolkit: * The `Baseline` 
directory contains valid images created by tests. Generated images are compared with these baseline images during regression testing. * The `Input` directory contains data files that are used by the tests. + +Adding test data +---------------- + +Test data is fetched at build time from content-addressed storage by +`CMake/ITKExternalData.cmake`. Large files are *not* committed to the ITK git +repository; instead, a small `.cid` (or `.md5` / `.sha256`) content-link file +is committed next to where the data is referenced. + +To add new test data, use the upload skill at +`Utilities/Maintenance/ExternalDataUpload/`: + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +``` + +The script uploads the file to IPFS, pins it on the redundant pinning +services, replaces the original with a `.cid` content-link, and records the +CID in `Testing/Data/content-links.manifest`. See the skill's `README.md` +for one-time setup and the full workflow, including the optional +`ITKTestingData` GitHub Pages mirror step. diff --git a/Testing/Data/content-links.manifest b/Testing/Data/content-links.manifest new file mode 100644 index 00000000000..d9d6fca9994 --- /dev/null +++ b/Testing/Data/content-links.manifest @@ -0,0 +1,14 @@ +# ITK content-link manifest +# +# One entry per line, format: +# +# Maintained automatically by +# Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +# and used by +# Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh +# to batch-pin every CID on the local Kubo node and every configured +# remote pinning service (itk-pinata, itk-filebase, ...). +# +# Paths must not contain whitespace (the manifest uses a single space as +# the field delimiter). Data lines are kept sorted by path; comment lines +# above the first data line are preserved on re-write. 
diff --git a/Utilities/Maintenance/ExternalDataUpload/README.md b/Utilities/Maintenance/ExternalDataUpload/README.md new file mode 100644 index 00000000000..50bc029f2ba --- /dev/null +++ b/Utilities/Maintenance/ExternalDataUpload/README.md @@ -0,0 +1,313 @@ +# ITK External Data Upload + +Upload large test images and baselines to IPFS, optionally mirror them into the +[`ITKTestingData`](https://github.com/InsightSoftwareConsortium/ITKTestingData) +repository, and replace the original with a lightweight `.cid` content link +committed to the ITK source tree. + +This complements [`CMake/ITKExternalData.cmake`](../../../CMake/ITKExternalData.cmake), +which fetches content at test configure time from the gateways listed there +(`ITKTestingData` on GitHub Pages, `data.kitware.com`, `itk.org`, local Kubo +gateway, `ipfs.io`, `gateway.pinata.cloud`, `cloudflare-ipfs.com`, +`dweb.link`). + +## One-Time Developer Setup + +### 1. Install Kubo (IPFS) + +You need the Kubo IPFS implementation. Choose one method: + +**IPFS Desktop** (recommended — bundles the Kubo daemon with a GUI, with a +system-tray icon, peer/bandwidth statistics, a file browser for your MFS, and +one-click start/stop): + +Download from <https://docs.ipfs.tech/install/ipfs-desktop/>. IPFS Desktop +auto-starts the daemon on login and exposes the same HTTP API that the `ipfs` +CLI uses (default `127.0.0.1:5001`), so every command in this guide works +identically whether you started the daemon from the command line or from the +tray. + +**CLI only** (macOS): + +```bash +brew install ipfs +``` + +**CLI only** (Linux): + +Download the latest release from <https://dist.ipfs.tech/#kubo>, then: + +```bash +tar xvfz kubo_*_linux-amd64.tar.gz +cd kubo && sudo bash install.sh +``` + +After installation, verify `ipfs` is on your PATH: + +```bash +ipfs --version +``` + +### 2. Initialize and Start the Daemon + +```bash +# One-time initialization (creates ~/.ipfs) +ipfs init + +# Start the daemon (keep running in a separate terminal, or use IPFS Desktop) +ipfs daemon +``` + +### 2a. 
Apply the UnixFS v1 2025 Profile + +Requires **Kubo v0.40.0 or later**. Apply once per node, before your first +upload: + +```bash +ipfs config profile apply unixfs-v1-2025 +``` + +This pins the UnixFS importer settings (chunker, layout, raw-leaves, HAMT +directory thresholds) to standardized values for reproducible CIDs. Without it, +`ipfs add` defaults may drift across Kubo patch releases and across +implementations (Helia, rust-ipfs, boxo), so two contributors uploading the +same file can produce different CIDs — which breaks the `.cid` content-link +contract ITK relies on. + +The profile applies to **new adds only**; existing pinned content and +already-committed `.cid` files are unaffected. + +References: + +- [Kubo v0.40.0 release notes](https://github.com/ipfs/kubo/releases/tag/v0.40.0) +- [Reproducible CIDs — IPFS blog, March 2026](https://blog.ipfs.tech/2026-03-reproducible-cids/) + +### 3. Configure Remote Pinning Services + +The upload script pins content on two remote services for redundancy, matching +the gateways declared in `CMake/ITKExternalData.cmake`. Both services must be +configured under the **exact names `itk-pinata` and `itk-filebase`** — the +upload script looks up those names and fails if they are missing. + +#### Pinata (service name: `itk-pinata`) + +1. Sign up at <https://app.pinata.cloud/> +2. Create an API key at <https://app.pinata.cloud/developers/api-keys> + - Enable **pinByHash** and **pinFileToIPFS** permissions +3. Copy the JWT token and add the service (use a prompt to avoid leaking + the token into shell history): + +```bash +printf "Pinata JWT: " && read -rs PINATA_JWT && echo +ipfs pin remote service add itk-pinata https://api.pinata.cloud/psa "$PINATA_JWT" +``` + +4. Verify: + +```bash +ipfs pin remote service ls +# Should show: itk-pinata https://api.pinata.cloud/psa +``` + +#### Filebase (service name: `itk-filebase`) + +1. Sign up at <https://filebase.com/> +2. Create an **IPFS bucket** at <https://console.filebase.com/buckets> +3. Go to <https://console.filebase.com/keys>, select your IPFS bucket in the + "IPFS Pinning Service API Endpoint" section, and copy the generated token +4. 
Add the service: + +```bash +printf "Filebase token: " && read -rs FILEBASE_TOKEN && echo +ipfs pin remote service add itk-filebase https://api.filebase.io/v1/ipfs "$FILEBASE_TOKEN" +``` + +5. Verify: + +```bash +ipfs pin remote service ls +# Should show: itk-filebase https://api.filebase.io/v1/ipfs +``` + +## Usage + +### Upload a single file + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +``` + +The script will: + +1. Add the file to IPFS with `--cid-version=1` (UnixFS v1 2025 profile) +2. Pin it locally +3. Pin it on `itk-pinata` and `itk-filebase` +4. Replace the original file with `.cid` containing the CID +5. Append/update an entry in `Testing/Data/content-links.manifest` +6. Print the `git rm` / `git add` commands to stage the change + +### Also mirror the bytes to `ITKTestingData` + +Pass `--testing-data-repo ` to additionally copy the file into a local +clone of +[`ITKTestingData`](https://github.com/InsightSoftwareConsortium/ITKTestingData) +at `CID/` and `git add` it there. This populates the +`https://insightsoftwareconsortium.github.io/ITKTestingData/CID/` mirror +gateway already listed in `CMake/ITKExternalData.cmake`. + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ + --testing-data-repo ~/src/ITKTestingData \ + Testing/Data/Input/brain.nii.gz +``` + +**GitHub file size limits.** `ITKTestingData` is hosted on GitHub, which +warns about files larger than 50 MB and hard-rejects pushes containing files +larger than **100 MB** per file. The upload script checks the file size before +mirroring and conservatively refuses to copy files over 50 MB into the +`ITKTestingData` tree, keeping the mirror well below the hard limit. IPFS +pinning (local + `itk-pinata` + +`itk-filebase`) still proceeds for oversized files — the mirror step is the +only one that gets skipped, with a clear warning. + +Commit the staged `CID/` file in `ITKTestingData` and push; the +`gh-pages` workflow on that repo republishes the new file at the GitHub Pages +mirror gateway. 
+ +### Batch-pin every CID in the manifest + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh +``` + +Reads `Testing/Data/content-links.manifest` and pins every CID locally plus on +every configured remote pinning service. Useful for: + +- Bootstrapping a new local Kubo node with all ITK test content +- Re-pinning everything after rotating a pinning provider +- Verifying all pinned content is still reachable + +Use `--background` to queue remote pins asynchronously (the remote services +then fetch the content themselves): + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh --background +``` + +### Normalize existing content links to CID + +`.md5` / `.sha256` / `.sha512` content links can be converted to `.cid`, and +existing `.cid` links can be regenerated under the UnixFS v1 2025 profile (in +case they were originally produced with older chunker defaults). + +```bash +Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +``` + +The script will, for each content link under the given path: + +1. Fetch the bytes through the gateways in `CMake/ITKExternalData.cmake` (same + order the build uses, so a gateway CI can't reach is a gateway this script + won't accept). +2. Verify the fetched bytes against the declared hash (for `.md5` / `.shaNNN` + links) or the declared CID (for `.cid` links). If verification fails the + link is left untouched and reported. +3. Re-materialize the actual file next to the content link, then invoke + `ipfs-upload.sh` on it so the new CID is produced under the UnixFS v1 2025 + profile, pinned locally and on `itk-pinata` / `itk-filebase`, and (if + `--testing-data-repo` is passed) mirrored into `ITKTestingData`. The old + `.md5` / `.sha256` / `.sha512` link is removed; a `.cid` link is written in + its place. + +Common options: + +```bash +# Dry run — report what would change, modify nothing. 
+content-link-normalize.sh Modules/Filtering/Foo --dry-run + +# Also mirror bytes into a local ITKTestingData checkout. +content-link-normalize.sh Testing/Data/Input --testing-data-repo ~/src/ITKTestingData + +# Only process files that are currently .md5 / .shaNNN (skip existing .cid). +content-link-normalize.sh Modules --hash-only +``` + +## Content Link Manifest + +`Testing/Data/content-links.manifest` is a plain-text index of every CID the +upload script has produced. One entry per line: + +```text + +``` + +Example: + +```text +bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi Testing/Data/Input/brain.nii.gz +bafkreihvlpx2z3xyhmhegrqo6vn4balcm3gkskdigoyl3i5v7iq5mhtaee Testing/Data/Baseline/Filtering/brain-diff.mha +``` + +Rules: + +- `` is a repo-relative path and **must not contain whitespace** — + the manifest uses a single space as the field delimiter. Rename files with + spaces before uploading. +- `ipfs-upload.sh` maintains this file automatically: entries are added on + first upload and replaced on re-upload. The data lines are sorted by path + for a minimal review diff; comment lines at the top are preserved. +- The manifest should be committed alongside the `.cid` files the upload + produced. + +## How `.cid` Files Work + +A `.cid` file is a single-line plain-text file containing one IPFS CIDv1, +base32-encoded. Example: + +```text +bafybeigdyrzt5sfp7udm7hu76uh7y26nf3efuylqabf3oclgtqy55fbzdi +``` + +ITK's CMake layer recognises the `.cid` extension via +`ExternalData_URL_ALGO_CID_lower` in +[`CMake/ITKExternalData.cmake`](../../../CMake/ITKExternalData.cmake). At +configure time, `ExternalData.cmake` substitutes the CID into each +`ExternalData_URL_TEMPLATES` entry (local Kubo gateway, `ipfs.io`, +`gateway.pinata.cloud`, `cloudflare-ipfs.com`, `dweb.link`, plus the +`ITKTestingData` GitHub Pages mirror) and downloads from the first one that +responds. The downloaded content is cached in +`ExternalData_OBJECT_STORES` under `cid/`. 
+ +Because CIDs are content-addressed, a corrupt download is detected +automatically: a gateway that returns the wrong bytes will produce a different +CID, and the cache lookup misses. + +## Troubleshooting + +### `ipfs command not found on PATH` + +Install Kubo (see step 1 above). If using IPFS Desktop on macOS, the app +installs `/usr/local/bin/ipfs` automatically; on Linux, IPFS Desktop does not +install a CLI symlink, so either add Kubo separately or point your shell at +the bundled binary inside the AppImage. + +### `IPFS daemon does not appear to be running` + +Start the daemon: `ipfs daemon` in a separate terminal, or launch IPFS +Desktop. The script tests the connection with `ipfs swarm peers`, which +requires an active daemon. + +### `Required pinning service 'itk-pinata' is not configured` + +Run `ipfs pin remote service ls` to see configured services. Re-add with the +commands in step 3 above. Tokens may have expired if you revoked the API key. +The script intentionally refuses to upload if either `itk-pinata` or +`itk-filebase` is missing: a single pin provider is not enough redundancy for +test data CI relies on. + +### Remote pin failed + +The script prints retry commands for any failed pins. Common causes: + +- **Expired API token** — regenerate at the service dashboard +- **Rate limiting** — wait a moment and retry +- **Large file timeout** — the file may take time to transfer; retry the + printed `ipfs pin remote add` command manually diff --git a/Utilities/Maintenance/ExternalDataUpload/SKILL.md b/Utilities/Maintenance/ExternalDataUpload/SKILL.md new file mode 100644 index 00000000000..ad170a7e643 --- /dev/null +++ b/Utilities/Maintenance/ExternalDataUpload/SKILL.md @@ -0,0 +1,126 @@ +--- +name: external-data-upload +description: > + Upload ITK test data to IPFS and produce .cid content links, pin on + itk-pinata and itk-filebase, optionally mirror into ITKTestingData, and + normalize existing .md5 / .sha256 / .cid content links. 
Use when the + user wants to add test images, baseline data, or model files under + Testing/Data/ or a module's data/ directory, or when asked to convert + hash-based content links to CID. +allowed-tools: + - Bash + - Read +--- + +# ITK External Data Upload + +Upload a file to IPFS and replace it with a `.cid` content link, maintain the +`Testing/Data/content-links.manifest`, and (optionally) mirror the bytes into +`ITKTestingData` for the GitHub Pages gateway. Also: regenerate existing +`.md5` / `.sha256` / `.cid` content links under the UnixFS v1 2025 profile. + +## Prerequisites + +The developer must have IPFS and pinning services configured. If not, direct +them to [`README.md`](./README.md) in this directory. + +Required: + +- IPFS daemon running (`ipfs daemon` or IPFS Desktop) +- UnixFS v1 2025 profile applied (`ipfs config profile apply unixfs-v1-2025`) +- `itk-pinata` remote pinning service configured +- `itk-filebase` remote pinning service configured + +## Tasks this skill handles + +### 1. Upload a single file + +Run the upload script: + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +``` + +If the user mentions `ITKTestingData` or asks you to mirror the bytes to +GitHub Pages, pass `--testing-data-repo `: + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ + --testing-data-repo \ + +``` + +The script will: + +1. Add to IPFS with `--cid-version=1` (UnixFS v1 2025 profile) +2. Pin locally, on `itk-pinata`, and on `itk-filebase` +3. If `--testing-data-repo` given and file ≤ 50 MB, copy to + `/CID/` and `git add` it there. Files over 50 MB are skipped + for the mirror step only (GitHub hard-rejects files over 100 MB; the 50 MB + cutoff keeps the mirror well below that limit) — IPFS pinning still + succeeds. +4. Replace the source file with `.cid` +5. Update `Testing/Data/content-links.manifest` + +### 2. Pin every CID from the manifest + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh +``` + +Use for bootstrapping a new IPFS node or re-pinning after rotating a provider. 
+ +### 3. Normalize existing content links + +Use when the user wants to convert `.md5` / `.sha256` / `.sha512` links to +`.cid`, or re-generate `.cid` links under the UnixFS v1 2025 profile. + +```bash +Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +``` + +Useful options: + +- `--dry-run` — report what would change +- `--hash-only` — only touch `.md5` / `.shaNNN` links, leave `.cid` alone +- `--cid-only` — only re-hash existing `.cid` links under the new profile +- `--testing-data-repo ` — forwarded to `ipfs-upload.sh` + +The normalize script fetches bytes through the gateway templates in +`CMake/ITKExternalData.cmake` (same order as the build), verifies them +against the declared hash or CID, and invokes `ipfs-upload.sh` to produce +the new `.cid`. + +## After Upload + +Stage the git changes the upload script prints. Typical ITK workflow: + +```bash +git rm +git add .cid +git add Testing/Data/content-links.manifest +``` + +If `--testing-data-repo` was used, follow the printed commands in that repo: + +```bash +git -C commit -m "Add ()" +git -C push +``` + +Commit the ITK changes with an appropriate prefix per +[`Documentation/AI/git-commits.md`](../../../Documentation/AI/git-commits.md): + +- `ENH:` for new test data +- `STYLE:` for normalizing existing content links (no test semantics change) + +## How `.cid` Files Work + +A `.cid` file is one line of plain text: a CIDv1, base32-encoded. ITK's +`CMake/ITKExternalData.cmake` recognises the `.cid` extension and fetches +through the gateway list declared there (local Kubo, `ipfs.io`, +`gateway.pinata.cloud`, `cloudflare-ipfs.com`, `dweb.link`, plus the +`ITKTestingData` GitHub Pages mirror at +`insightsoftwareconsortium.github.io/ITKTestingData/CID/`). + +Because CIDs are content-addressed, integrity is verified automatically at +fetch time. 
diff --git a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh new file mode 100755 index 00000000000..bd01128eb36 --- /dev/null +++ b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh @@ -0,0 +1,416 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Normalize ITK content links: convert .md5 / .shaNNN links to .cid and +# regenerate existing .cid links under the UnixFS v1 2025 profile. +# +# For each content link found, the script: +# 1. Fetches the bytes via the gateway templates declared in +# CMake/ITKExternalData.cmake (identical order to the build). +# 2. Verifies the bytes against the declared hash or CID. +# 3. Re-materialises the actual file alongside the link, then invokes +# ipfs-upload.sh on it so a fresh CID is produced under the UnixFS +# v1 2025 profile, pinned on itk-pinata and itk-filebase, and +# (optionally) mirrored into ITKTestingData. +# +# Usage: +# content-link-normalize.sh [options] +# +# Options: +# --testing-data-repo Forwarded to ipfs-upload.sh. Local +# ITKTestingData clone to mirror bytes into. +# --dry-run List what would change without modifying. +# --hash-only Process only .md5 / .shaNNN links +# (leave existing .cid links alone). +# --cid-only Process only .cid links +# (re-hash under UnixFS v1 2025 profile). +# -h|--help Show this help. +# +# Exit codes: +# 0 — all content links normalized +# 1 — usage / environment error +# 2 — one or more links failed to fetch, verify, or re-upload + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." 
&& pwd)" +CMAKE_FILE="$REPO_ROOT/CMake/ITKExternalData.cmake" +UPLOAD_SCRIPT="$SCRIPT_DIR/ipfs-upload.sh" + +info() { printf '==> %s\n' "$*"; } +warn() { printf 'WARN: %s\n' "$*" >&2; } +die() { printf 'ERROR: %s\n' "$*" >&2; exit 1; } + +show_help() { + sed -n '3,/^$/{ s/^# \?//; p }' "$0" + exit 0 +} + +# --------------------------------------------------------------------------- +# Argument parsing +# --------------------------------------------------------------------------- + +TESTING_DATA_REPO="" +DRY_RUN=false +HASH_ONLY=false +CID_ONLY=false +TARGET="" + +while [[ $# -gt 0 ]]; do + case "$1" in + -h|--help) show_help ;; + --dry-run) DRY_RUN=true; shift ;; + --hash-only) HASH_ONLY=true; shift ;; + --cid-only) CID_ONLY=true; shift ;; + --testing-data-repo) + TESTING_DATA_REPO="${2:?--testing-data-repo requires a path}" + shift 2 + ;; + --testing-data-repo=*) + TESTING_DATA_REPO="${1#--testing-data-repo=}" + shift + ;; + -*) die "Unknown option: $1" ;; + *) + [[ -z "$TARGET" ]] || die "Unexpected positional arg: $1" + TARGET="$1" + shift + ;; + esac +done + +[[ -n "$TARGET" ]] || die "Path or file required. Example: content-link-normalize.sh Testing/Data/Input" +[[ -e "$TARGET" ]] || die "Not found: $TARGET" +[[ -f "$CMAKE_FILE" ]] || die "Cannot find $CMAKE_FILE" +[[ -x "$UPLOAD_SCRIPT" ]] || die "Cannot find or execute $UPLOAD_SCRIPT" + +if $HASH_ONLY && $CID_ONLY; then + die "--hash-only and --cid-only are mutually exclusive" +fi + +# --------------------------------------------------------------------------- +# Prerequisites +# --------------------------------------------------------------------------- + +command -v curl >/dev/null 2>&1 || die "curl is required" +command -v ipfs >/dev/null 2>&1 || die "ipfs is required (for CID recomputation)" + +# Hash tools are only needed for links we actually encounter, but fail fast. 
+for tool in md5sum sha1sum sha224sum sha256sum sha384sum sha512sum; do + command -v "$tool" >/dev/null 2>&1 || warn "$tool not found; any matching content links will fail to verify" +done + +# --------------------------------------------------------------------------- +# Parse ExternalData_URL_TEMPLATES from CMake/ITKExternalData.cmake +# --------------------------------------------------------------------------- +# +# Matches the order in the .cmake file exactly. The block we want looks like: +# +# list( +# APPEND +# ExternalData_URL_TEMPLATES +# # comment +# "https://.../%(hash)" +# ... +# ) +# +# Strategy: join the whole file into one logical string, locate the +# `list(... ExternalData_URL_TEMPLATES ... )` invocation by matching +# balanced parentheses, then print every quoted template inside it that +# contains %(hash). + +readarray -t URL_TEMPLATES < <( + awk ' + BEGIN { depth = 0; in_block = 0 } + { + line = $0 + # Trim leading whitespace. + sub(/^[[:space:]]+/, "", line) + + # Enter the block when we see `list(` followed somewhere by + # `ExternalData_URL_TEMPLATES` at depth 1. We buffer tokens + # at depth 1 until we are sure. + if (!in_block && line ~ /^list[[:space:]]*\(/) { + pending_list = 1 + depth = 1 + next + } + if (pending_list) { + if (line ~ /ExternalData_URL_TEMPLATES/) { + in_block = 1 + pending_list = 0 + next + } + # Track depth so we know when the list(...) we rejected ends. 
+ n_open = gsub(/\(/, "(", line) + n_close = gsub(/\)/, ")", line) + depth += n_open - n_close + if (depth <= 0) { pending_list = 0; depth = 0 } + next + } + + if (in_block) { + if (line ~ /^#/) next + if (line ~ /^\)/) { in_block = 0; depth = 0; next } + if (match(line, /"[^"]+"/)) { + tmpl = substr(line, RSTART + 1, RLENGTH - 2) + if (tmpl ~ /%\(hash\)/) print tmpl + } + } + } + ' "$CMAKE_FILE" +) + +if [[ ${#URL_TEMPLATES[@]} -eq 0 ]]; then + die "Failed to parse ExternalData_URL_TEMPLATES from $CMAKE_FILE" +fi + +info "Loaded ${#URL_TEMPLATES[@]} gateway template(s) from CMake/ITKExternalData.cmake" + +# --------------------------------------------------------------------------- +# Helpers +# --------------------------------------------------------------------------- + +# Algorithm mapping: extension -> uppercase algorithm name for %(algo). +# Matches CMake ExternalData.cmake's _ExternalData_link_content behaviour. +algo_uc_for_ext() { + case "$1" in + md5) echo "MD5" ;; + sha1) echo "SHA1" ;; + sha224) echo "SHA224" ;; + sha256) echo "SHA256" ;; + sha384) echo "SHA384" ;; + sha512) echo "SHA512" ;; + # CID uses a lowercase override in ITKExternalData.cmake + # (ExternalData_URL_ALGO_CID_lower = cid). + cid) echo "cid" ;; + *) return 1 ;; + esac +} + +hash_tool_for_ext() { + case "$1" in + md5) echo "md5sum" ;; + sha1) echo "sha1sum" ;; + sha224) echo "sha224sum" ;; + sha256) echo "sha256sum" ;; + sha384) echo "sha384sum" ;; + sha512) echo "sha512sum" ;; + *) return 1 ;; + esac +} + +# Substitute %(algo) / %(hash) in a URL template. +render_url() { + local template="$1" algo="$2" hash="$3" + local url="${template//%(algo)/$algo}" + url="${url//%(hash)/$hash}" + printf '%s\n' "$url" +} + +# Fetch a content link into a tempfile, verifying the bytes correspond to +# the declared digest or CID. Prints the path of the verified tempfile on +# success. 
+# +# For CID links: `ipfs cat` is the primary fetch path because the daemon +# verifies the returned bytes server-side against the requested CID. +# Public IPFS HTTP gateways (paths containing /ipfs/) also verify +# server-side, so fetches from those URLs are accepted without local +# recomputation. Local `ipfs add --only-hash` is NOT used for verification +# because it can produce a different CID from the stored one when the +# original upload used non-default chunker or hash parameters — chunker +# drift is exactly what the UnixFS v1 2025 profile is meant to fix, so a +# mismatch would be expected, not an error. +# +# For hash links (.md5, .shaNNN): non-IPFS gateways only serve bytes by +# name, so we recompute the digest locally and compare. +fetch_and_verify() { + local ext="$1" # cid / md5 / shaNNN + local value="$2" # the actual hash or CID + local out + out="$(mktemp -t itk-content-link.XXXXXX)" + + local algo_uc + algo_uc="$(algo_uc_for_ext "$ext")" || { + warn "Unknown content-link extension: .${ext}" + rm -f "$out" + return 1 + } + + # Fast path for .cid: fetch via the running daemon. Verification is + # implicit — the daemon refuses to return bytes that do not hash back + # to the CID. + if [[ "$ext" == "cid" ]]; then + if ipfs cat "$value" > "$out" 2>/dev/null && [[ -s "$out" ]]; then + printf '%s\n' "$out" + return 0 + fi + fi + + local template rendered + for template in "${URL_TEMPLATES[@]}"; do + rendered="$(render_url "$template" "$algo_uc" "$value")" + + # IPFS gateway templates (path contains /ipfs/) only make sense for CIDs. + if [[ "$ext" != "cid" && "$rendered" == *"/ipfs/"* ]]; then + continue + fi + + if ! 
curl -sfL --connect-timeout 10 --max-time 120 -o "$out" "$rendered"; then + continue + fi + + if verify_bytes "$ext" "$value" "$out" "$rendered"; then + printf '%s\n' "$out" + return 0 + else + warn " content from ${rendered} did not verify; trying next gateway" + fi + done + + rm -f "$out" + return 1 +} + +# Verify that the fetched bytes at $file correspond to the declared link. +# +# For CID links: trust only fetches from IPFS HTTP gateways, which verify +# server-side (a CID-indexed path the server actually serves is by +# definition a path whose bytes hash to that CID). +# +# For hash links: recompute the digest and compare case-insensitively. +verify_bytes() { + local ext="$1" expected="$2" file="$3" source_url="${4:-}" + if [[ "$ext" == "cid" ]]; then + # IPFS HTTP gateways do server-side verification; accept those. + if [[ "$source_url" == *"/ipfs/"* ]]; then + [[ -s "$file" ]] + return + fi + # Non-IPFS origin (e.g. GitHub Pages mirror at .../CID/) — + # we cannot verify locally without risking chunker-drift false + # negatives, so reject. The `ipfs cat` fast path in + # fetch_and_verify is the canonical way to resolve a .cid. + return 1 + fi + + local tool actual + tool="$(hash_tool_for_ext "$ext")" || return 1 + command -v "$tool" >/dev/null 2>&1 || return 1 + actual="$("$tool" "$file" | awk '{print $1}')" + [[ "${actual,,}" == "${expected,,}" ]] +} + +# --------------------------------------------------------------------------- +# Enumerate targets +# --------------------------------------------------------------------------- + +if [[ -f "$TARGET" ]]; then + LINKS=("$TARGET") +else + LINKS=() + readarray -t LINKS < <( + find "$TARGET" -type f \( \ + -name "*.cid" \ + -o -name "*.md5" \ + -o -name "*.sha1" \ + -o -name "*.sha224" \ + -o -name "*.sha256" \ + -o -name "*.sha384" \ + -o -name "*.sha512" \ + \) | LC_ALL=C sort + ) +fi + +# Filter by --hash-only / --cid-only. Iterate defensively so `set -u` on an +# empty LINKS array (e.g. 
directory with no content links) does not error +# out on bash versions before 4.4. +FILTERED=() +if [[ ${#LINKS[@]} -gt 0 ]]; then + for link in "${LINKS[@]}"; do + ext="${link##*.}" + if $HASH_ONLY && [[ "$ext" == "cid" ]]; then continue; fi + if $CID_ONLY && [[ "$ext" != "cid" ]]; then continue; fi + FILTERED+=("$link") + done +fi + +if [[ ${#FILTERED[@]} -eq 0 ]]; then + info "No matching content links under ${TARGET}. Nothing to do." + exit 0 +fi + +LINKS=("${FILTERED[@]}") + +info "Processing ${#LINKS[@]} content link(s)..." +$DRY_RUN && info "(--dry-run: no files will be modified)" + +# --------------------------------------------------------------------------- +# Main loop +# --------------------------------------------------------------------------- + +UPLOAD_ARGS=() +if [[ -n "$TESTING_DATA_REPO" ]]; then + UPLOAD_ARGS+=(--testing-data-repo "$TESTING_DATA_REPO") +fi + +FAIL=0 + +for link in "${LINKS[@]}"; do + ext="${link##*.}" + value="$(tr -d '[:space:]' < "$link")" + real_file="${link%.${ext}}" + + if [[ -z "$value" ]]; then + printf 'FAIL %s empty-content-link\n' "$link" >&2 + FAIL=$((FAIL + 1)) + continue + fi + + if $DRY_RUN; then + printf 'WOULD-NORMALIZE %s (%s=%s) -> %s.cid\n' \ + "$link" "$ext" "$value" "$real_file" + continue + fi + + info "Normalizing ${link} (${ext}=${value})" + + if [[ -e "$real_file" ]]; then + die "Refusing to normalize: ${real_file} already exists on disk. Delete or move it first." + fi + + tmp_bytes="" + if ! tmp_bytes="$(fetch_and_verify "$ext" "$value")"; then + printf 'FAIL %s fetch-or-verify-failed\n' "$link" >&2 + FAIL=$((FAIL + 1)) + continue + fi + + # Stage the real file next to the link, then re-upload via ipfs-upload.sh. + mv "$tmp_bytes" "$real_file" + + # Remove the old content link BEFORE running ipfs-upload.sh — the upload + # script rejects inputs that look like content links (defensive guard), + # but we also want a clean working tree if the upload fails. + rm -f "$link" + + if ! 
"$UPLOAD_SCRIPT" "${UPLOAD_ARGS[@]}" "$real_file"; then + printf 'FAIL %s upload-failed\n' "$link" >&2 + # Best-effort recovery: restore the original link file from its value. + # ipfs-upload.sh writes the .cid file and removes the data file before + # updating the manifest, so a failure in the manifest step can leave a + # .cid orphan alongside the restored original link — clean it up too. + printf '%s\n' "$value" > "$link" + rm -f "$real_file" "${real_file}.cid" + FAIL=$((FAIL + 1)) + continue + fi + + printf 'NORMALIZE %s (%s) -> %s.cid\n' "$link" "$ext" "$real_file" +done + +if (( FAIL > 0 )); then + warn "${FAIL} content link(s) failed to normalize." + exit 2 +fi + +info "Done. Review changes and commit as a STYLE: commit (see Documentation/AI/git-commits.md)." diff --git a/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh b/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh new file mode 100755 index 00000000000..9286a4f83cd --- /dev/null +++ b/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh @@ -0,0 +1,176 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Batch-pin every CID in Testing/Data/content-links.manifest locally and on +# every configured remote pinning service (itk-pinata, itk-filebase, ...). +# +# Usage: ipfs-pin-all.sh [--background] +# +# Options: +# --background Queue remote pins asynchronously (faster, no wait). + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." 
&& pwd)" +MANIFEST="$REPO_ROOT/Testing/Data/content-links.manifest" + +BACKGROUND="" +while [[ $# -gt 0 ]]; do + case "$1" in + --background) + BACKGROUND="--background" + shift + ;; + -h|--help) + sed -n '4,11p' "${BASH_SOURCE[0]}" | sed 's/^# \{0,1\}//' + exit 0 + ;; + *) + echo "ERROR: Unknown argument: $1" >&2 + echo "Usage: $(basename "${BASH_SOURCE[0]}") [--background]" >&2 + exit 2 + ;; + esac +done + +# --------------------------------------------------------------------------- +# Validate manifest +# --------------------------------------------------------------------------- + +if [[ ! -f "$MANIFEST" ]]; then + echo "ERROR: Manifest not found: $MANIFEST" >&2 + exit 1 +fi + +ENTRY_COUNT="$(grep -Evc '^(#|$)' "$MANIFEST" || true)" +if [[ "$ENTRY_COUNT" -eq 0 ]]; then + echo "Manifest is empty — nothing to pin." + exit 0 +fi + +# --------------------------------------------------------------------------- +# Prerequisites +# --------------------------------------------------------------------------- + +if ! command -v ipfs &>/dev/null; then + echo "ERROR: 'ipfs' command not found on PATH." >&2 + echo " Install Kubo: https://docs.ipfs.tech/install/command-line/" >&2 + exit 1 +fi + +if ! ipfs swarm peers &>/dev/null; then + echo "ERROR: IPFS daemon does not appear to be running." >&2 + echo " Start with: ipfs daemon" >&2 + exit 1 +fi + +# Discover configured remote services (none required for batch pinning). +CONFIGURED_SERVICES="$(ipfs pin remote service ls 2>/dev/null || true)" +SERVICES=() +while IFS= read -r line; do + svc="$(echo "$line" | awk '{print $1}')" + if [[ -n "$svc" ]]; then + SERVICES+=("$svc") + fi +done <<< "$CONFIGURED_SERVICES" + +if [[ ${#SERVICES[@]} -eq 0 ]]; then + echo "WARNING: No remote pinning services configured." >&2 + echo " Only local pinning will be performed." 
>&2 +fi + +# --------------------------------------------------------------------------- +# Pin each CID +# --------------------------------------------------------------------------- + +TOTAL=0 +LOCAL_FAILED=0 +LOCAL_FAILED_ENTRIES=() +REMOTE_FAILED=0 +REMOTE_FAILED_ENTRIES=() + +echo "==> Pinning ${ENTRY_COUNT} CIDs from manifest..." +if [[ -n "$BACKGROUND" ]]; then + echo " (remote pins queued in background)" +fi +echo "" + +while IFS= read -r line; do + # Skip comments and empty lines. + [[ "$line" =~ ^# ]] && continue + [[ -z "$line" ]] && continue + + CID="$(echo "$line" | awk '{print $1}')" + FILEPATH="$(echo "$line" | awk '{print $2}')" + + # Skip malformed lines (missing CID or filepath). + if [[ -z "$CID" || -z "$FILEPATH" ]]; then + echo "WARNING: Skipping malformed manifest line: $line" >&2 + continue + fi + + PIN_NAME="$(basename "$FILEPATH")" + TOTAL=$((TOTAL + 1)) + + echo "==> [${TOTAL}/${ENTRY_COUNT}] ${FILEPATH}" + echo " CID: ${CID}" + + # Local pin. + if ! ipfs pin add "$CID" >/dev/null 2>&1; then + echo " FAILED: local pin" >&2 + LOCAL_FAILED=$((LOCAL_FAILED + 1)) + LOCAL_FAILED_ENTRIES+=("$FILEPATH") + continue + fi + echo " OK: local" + + # Remote pins. + for svc in "${SERVICES[@]}"; do + # Skip services where this CID is already queued/pinning/pinned — + # Pinata rejects duplicate `pin remote add` calls with + # DUPLICATE_OBJECT (400). Same guard as ipfs-upload.sh. + if ipfs pin remote ls --service="$svc" --cid="$CID" \ + --status=queued,pinning,pinned 2>/dev/null | grep -q .; then + echo " OK: ${svc} (already pinned)" + continue + fi + + if ipfs pin remote add --service="$svc" --name="$PIN_NAME" $BACKGROUND "$CID" >/dev/null 2>&1; then + echo " OK: ${svc}" + else + echo " FAILED: ${svc}" >&2 + REMOTE_FAILED=$((REMOTE_FAILED + 1)) + if ! 
printf '%s\n' "${REMOTE_FAILED_ENTRIES[@]+"${REMOTE_FAILED_ENTRIES[@]}"}" | grep -qxF "$FILEPATH"; then + REMOTE_FAILED_ENTRIES+=("$FILEPATH") + fi + fi + done +done < "$MANIFEST" + +# --------------------------------------------------------------------------- +# Summary +# --------------------------------------------------------------------------- + +echo "" +echo "==> Batch pin complete: ${TOTAL} CIDs processed." + +EXIT_CODE=0 + +if [[ $LOCAL_FAILED -gt 0 ]]; then + echo "" >&2 + echo "ERROR: ${LOCAL_FAILED} CID(s) failed local pinning:" >&2 + for entry in "${LOCAL_FAILED_ENTRIES[@]}"; do + echo " - ${entry}" >&2 + done + EXIT_CODE=1 +fi + +if [[ $REMOTE_FAILED -gt 0 ]]; then + echo "" >&2 + echo "WARNING: ${REMOTE_FAILED} remote pin submission(s) failed:" >&2 + for entry in "${REMOTE_FAILED_ENTRIES[@]}"; do + echo " - ${entry}" >&2 + done + EXIT_CODE=1 +fi + +exit $EXIT_CODE diff --git a/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh b/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh new file mode 100755 index 00000000000..e6a76f86953 --- /dev/null +++ b/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh @@ -0,0 +1,308 @@ +#!/usr/bin/env bash +set -euo pipefail + +# Upload a file to IPFS (CIDv1, UnixFS v1 2025 profile), pin it on the +# itk-pinata and itk-filebase remote pinning services, and replace the +# original with a .cid content link. Optionally mirror the bytes into a +# local ITKTestingData checkout at CID/. +# +# Usage: +# ipfs-upload.sh [--testing-data-repo ] +# +# Options: +# --testing-data-repo Path to a local clone of +# https://github.com/InsightSoftwareConsortium/ITKTestingData +# The uploaded bytes are copied to +# /CID/ and `git add`ed there. +# Skipped with a warning for files > 50 MB, +# which GitHub rejects. 
+# +# Prerequisites: +# - Kubo (go-ipfs) installed and `ipfs` on PATH +# - IPFS daemon running (ipfs daemon, or IPFS Desktop) +# - UnixFS v1 2025 profile applied: `ipfs config profile apply unixfs-v1-2025` +# - `itk-pinata` and `itk-filebase` remote pinning services configured +# +# See README.md in this directory for full setup. + +SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" +REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" + +# Required remote pinning services — script errors if not configured. +REQUIRED_SERVICES=(itk-pinata itk-filebase) + +# GitHub hard-rejects pushes containing any file > 50 MB. The ITKTestingData +# mirror step is skipped for files over this limit. +GITHUB_FILE_LIMIT_BYTES=$((50 * 1024 * 1024)) + +# --------------------------------------------------------------------------- +# Argument parsing +# --------------------------------------------------------------------------- + +TESTING_DATA_REPO="" +FILE="" + +while [[ $# -gt 0 ]]; do + case "$1" in + --testing-data-repo) + TESTING_DATA_REPO="${2:?--testing-data-repo requires a path}" + shift 2 + ;; + --testing-data-repo=*) + TESTING_DATA_REPO="${1#--testing-data-repo=}" + shift + ;; + -h|--help) + sed -n '3,/^$/{ s/^# \?//; p }' "$0" + exit 0 + ;; + -*) + echo "ERROR: Unknown option: $1" >&2 + exit 1 + ;; + *) + if [[ -n "$FILE" ]]; then + echo "ERROR: Unexpected positional argument: $1" >&2 + exit 1 + fi + FILE="$1" + shift + ;; + esac +done + +if [[ -z "$FILE" ]]; then + echo "Usage: $0 [--testing-data-repo ] " >&2 + exit 1 +fi + +if [[ ! -f "$FILE" ]]; then + echo "ERROR: File not found: $FILE" >&2 + exit 1 +fi + +# Guard: reject symlinks (realpath would resolve to the target, and rm would +# delete the target file rather than the symlink itself). +if [[ -L "$FILE" ]]; then + echo "ERROR: Symlink paths are not supported: $FILE" >&2 + echo " Pass the real file path instead." >&2 + exit 1 +fi + +ABSOLUTE_FILE="$(realpath "$FILE")" + +# Guard: file must be inside the repository. 
+if [[ "$ABSOLUTE_FILE" != "$REPO_ROOT"/* ]]; then + echo "ERROR: File must be inside the repository: $ABSOLUTE_FILE" >&2 + exit 1 +fi + +# Guard: reject files that are already content links. +for ext in cid md5 sha1 sha224 sha256 sha384 sha512; do + if [[ "$FILE" == *."${ext}" ]]; then + echo "ERROR: File is already a .${ext} content link: $FILE" >&2 + exit 1 + fi +done + +REL_FILE="${ABSOLUTE_FILE#"$REPO_ROOT/"}" + +# Guard: reject paths with whitespace (manifest format uses space as delimiter). +if [[ "$REL_FILE" =~ [[:space:]] ]]; then + echo "ERROR: Filepath contains whitespace, which is not supported: $REL_FILE" >&2 + echo " Rename the file to remove spaces before uploading." >&2 + exit 1 +fi + +PIN_NAME="$(basename "$ABSOLUTE_FILE")" + +# --------------------------------------------------------------------------- +# Validate --testing-data-repo path (before any IPFS work) +# --------------------------------------------------------------------------- + +if [[ -n "$TESTING_DATA_REPO" ]]; then + if [[ ! -d "$TESTING_DATA_REPO" ]]; then + echo "ERROR: --testing-data-repo path is not a directory: $TESTING_DATA_REPO" >&2 + exit 1 + fi + if [[ ! -d "$TESTING_DATA_REPO/.git" ]]; then + echo "ERROR: --testing-data-repo is not a git checkout: $TESTING_DATA_REPO" >&2 + exit 1 + fi + TESTING_DATA_REPO="$(realpath "$TESTING_DATA_REPO")" +fi + +# --------------------------------------------------------------------------- +# Prerequisites +# --------------------------------------------------------------------------- + +if ! command -v ipfs &>/dev/null; then + echo "ERROR: 'ipfs' command not found on PATH." >&2 + echo " Install Kubo: https://docs.ipfs.tech/install/command-line/" >&2 + echo " See: Utilities/Maintenance/ExternalDataUpload/README.md" >&2 + exit 1 +fi + +if ! ipfs swarm peers &>/dev/null; then + echo "ERROR: IPFS daemon does not appear to be running." >&2 + echo " Start with: ipfs daemon" >&2 + echo " Or launch IPFS Desktop." 
>&2 + exit 1 +fi + +# Check required remote pinning services are configured. +CONFIGURED_SERVICES="$(ipfs pin remote service ls 2>/dev/null || true)" +for svc in "${REQUIRED_SERVICES[@]}"; do + if ! echo "$CONFIGURED_SERVICES" | grep -q "^${svc} "; then + echo "ERROR: Required pinning service '${svc}' is not configured." >&2 + echo " See: Utilities/Maintenance/ExternalDataUpload/README.md" >&2 + exit 1 + fi +done + +# --------------------------------------------------------------------------- +# Add to IPFS +# --------------------------------------------------------------------------- + +echo "==> Adding ${PIN_NAME} to IPFS (CIDv1, UnixFS v1 2025 profile)..." +CID="$(ipfs add --cid-version=1 --quieter "$ABSOLUTE_FILE")" + +if [[ -z "$CID" ]]; then + echo "ERROR: ipfs add returned an empty CID." >&2 + exit 1 +fi + +echo " CID: ${CID}" + +# --------------------------------------------------------------------------- +# Pin locally (ipfs add already pins, but be explicit) +# --------------------------------------------------------------------------- + +echo "==> Pinning locally..." +ipfs pin add "$CID" >/dev/null + +# --------------------------------------------------------------------------- +# Pin on remote services +# --------------------------------------------------------------------------- + +FAILED_PINS=() + +for svc in "${REQUIRED_SERVICES[@]}"; do + echo "==> Pinning on ${svc}..." + if ipfs pin remote add --service="$svc" --name="$PIN_NAME" "$CID" 2>&1; then + echo " OK: ${svc}" + else + echo " FAILED: ${svc}" >&2 + FAILED_PINS+=("$svc") + fi +done + +if [[ ${#FAILED_PINS[@]} -gt 0 ]]; then + echo "" >&2 + echo "ERROR: Remote pin failed for: ${FAILED_PINS[*]}" >&2 + echo " The original file has NOT been modified." 
>&2 + echo " Fix the issue and retry, or pin manually:" >&2 + for failed_svc in "${FAILED_PINS[@]}"; do + echo " ipfs pin remote add --service=${failed_svc} --name=\"${PIN_NAME}\" ${CID}" >&2 + done + exit 1 +fi + +# --------------------------------------------------------------------------- +# Mirror into ITKTestingData (optional, size-gated) +# --------------------------------------------------------------------------- + +FILE_SIZE_BYTES="$(stat -c '%s' "$ABSOLUTE_FILE" 2>/dev/null || stat -f '%z' "$ABSOLUTE_FILE")" + +if [[ -n "$TESTING_DATA_REPO" ]]; then + if (( FILE_SIZE_BYTES > GITHUB_FILE_LIMIT_BYTES )); then + echo "" >&2 + echo "WARNING: ${PIN_NAME} is ${FILE_SIZE_BYTES} bytes (> 50 MB)." >&2 + echo " GitHub rejects pushes containing files > 50 MB, so it" >&2 + echo " will NOT be mirrored to ITKTestingData." >&2 + echo " IPFS pin (local + itk-pinata + itk-filebase) succeeded;" >&2 + echo " the .cid content link will still be produced." >&2 + else + MIRROR_DIR="$TESTING_DATA_REPO/CID" + MIRROR_PATH="$MIRROR_DIR/$CID" + mkdir -p "$MIRROR_DIR" + echo "==> Mirroring to ITKTestingData: CID/${CID}" + cp "$ABSOLUTE_FILE" "$MIRROR_PATH" + if ! git -C "$TESTING_DATA_REPO" add "CID/$CID"; then + echo "ERROR: Failed to 'git add CID/$CID' in $TESTING_DATA_REPO" >&2 + rm -f "$MIRROR_PATH" + exit 1 + fi + fi +fi + +# --------------------------------------------------------------------------- +# Replace original file with .cid content link +# (only reached after all required remote pins succeeded) +# +# Ordering hazard: the .cid file is written and the original data file is +# removed BEFORE the manifest update below. If the process is killed or hits +# a disk-full error between here and the `mv` of "${MANIFEST}.tmp", the +# original is gone, the .cid link exists, but the manifest is not updated. +# content-link-normalize.sh's recovery block restores the original link and +# also removes any orphan .cid in that case. 
A standalone `ipfs-upload.sh` +# crash here leaves the working tree consistent (CID file present, original +# absent) but the manifest stale; the user can re-run after repairing. +# --------------------------------------------------------------------------- + +CID_FILE="${ABSOLUTE_FILE}.cid" +REL_CID="${CID_FILE#"$REPO_ROOT/"}" +printf '%s\n' "$CID" > "$CID_FILE" +rm "$ABSOLUTE_FILE" + +# --------------------------------------------------------------------------- +# Update content link manifest +# --------------------------------------------------------------------------- + +MANIFEST="$REPO_ROOT/Testing/Data/content-links.manifest" + +if [[ -f "$MANIFEST" ]]; then + # Remove existing entry for this filepath (re-upload case). + # Use awk for exact string match (grep would treat dots as wildcards). + awk -v path="$REL_FILE" '$2 != path' "$MANIFEST" > "${MANIFEST}.tmp" + mv "${MANIFEST}.tmp" "$MANIFEST" +else + # Seed a fresh manifest with a brief header. + cat > "$MANIFEST" <<'EOF' +# ITK content-link manifest +# One CID per line, format: +# Maintained by Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +EOF +fi + +# Append the new entry. +printf '%s %s\n' "$CID" "$REL_FILE" >> "$MANIFEST" + +# Sort data lines by filepath; preserve comment header at top. +{ + grep '^#' "$MANIFEST" || true + grep -v '^#' "$MANIFEST" | grep -v '^$' | LC_ALL=C sort -k2 +} > "${MANIFEST}.tmp" +mv "${MANIFEST}.tmp" "$MANIFEST" + +echo "" +echo "==> Upload complete." 
+echo " CID: ${CID}" +echo " Link: ${CID_FILE}" + +# --------------------------------------------------------------------------- +# Suggest git commands +# --------------------------------------------------------------------------- + +echo "" +echo "Next steps (ITK repository):" +echo " git rm \"${REL_FILE}\"" +echo " git add \"${REL_CID}\"" +echo " git add Testing/Data/content-links.manifest" + +if [[ -n "$TESTING_DATA_REPO" && $FILE_SIZE_BYTES -le $GITHUB_FILE_LIMIT_BYTES ]]; then + echo "" + echo "Next steps (ITKTestingData repository at ${TESTING_DATA_REPO}):" + echo " git -C \"${TESTING_DATA_REPO}\" commit -m \"Add ${PIN_NAME} (${CID})\"" + echo " git -C \"${TESTING_DATA_REPO}\" push" +fi From 3597566b72f4733208846da8c98da262bd39ec8d Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Thu, 23 Apr 2026 13:27:34 -0400 Subject: [PATCH 2/7] ENH: Support background remote pinning in ExternalDataUpload scripts Add `--background` to both `ipfs-upload.sh` and `content-link-normalize.sh` to submit remote pin requests asynchronously via `ipfs pin remote add --background`. The default remains synchronous (surfaces failures immediately, safer for one-off uploads); `--background` is intended for batch runs where waiting for each remote to reach `pinned` (minutes per file on Filebase) would be impractical. Also dedup remote-pin submission: before calling `ipfs pin remote add`, query `ipfs pin remote ls --status=queued,pinning,pinned` for the CID and skip the add if a pin already exists on that service. This avoids Pinata's `DUPLICATE_OBJECT` (400) error on re-runs of previously uploaded content, and prevents Filebase from accumulating duplicate queue entries. README.md and SKILL.md document the new flag, the synchronous vs. asynchronous tradeoff, and the post-run verification command (`ipfs pin remote ls --status=...`). 
--- .../Maintenance/ExternalDataUpload/README.md | 26 +++++++++++++ .../Maintenance/ExternalDataUpload/SKILL.md | 4 ++ .../content-link-normalize.sh | 11 ++++++ .../ExternalDataUpload/ipfs-upload.sh | 38 ++++++++++++++++--- 4 files changed, 74 insertions(+), 5 deletions(-) diff --git a/Utilities/Maintenance/ExternalDataUpload/README.md b/Utilities/Maintenance/ExternalDataUpload/README.md index 50bc029f2ba..33a01c28283 100644 --- a/Utilities/Maintenance/ExternalDataUpload/README.md +++ b/Utilities/Maintenance/ExternalDataUpload/README.md @@ -228,8 +228,34 @@ content-link-normalize.sh Testing/Data/Input --testing-data-repo ~/src/ITKTestin # Only process files that are currently .md5 / .shaNNN (skip existing .cid). content-link-normalize.sh Modules --hash-only + +# Batch run with asynchronous remote pinning (returns without waiting for +# each remote to reach 'pinned'). Verify afterwards with `ipfs pin remote ls`. +content-link-normalize.sh Modules --hash-only --background +``` + +### Synchronous vs. asynchronous remote pinning + +Both `ipfs-upload.sh` and `content-link-normalize.sh` default to +**synchronous** remote pinning: `ipfs pin remote add` blocks until the +remote reports `pinned`, which surfaces failures immediately and is +safest for one-off uploads. Remote fetch can take minutes per file, +however, which is impractical for batch runs. + +Pass `--background` to submit pin requests asynchronously — the remote +queues the pin and fetches the content itself, and the script returns +right away. Check final pin state with: + +```bash +ipfs pin remote ls --service=itk-pinata --status=queued,pinning,pinned +ipfs pin remote ls --service=itk-filebase --status=queued,pinning,pinned ``` +Both scripts also pre-check each remote for an existing pin on the same +CID and skip the `pin remote add` call if one is already queued, pinning, +or pinned — this prevents `DUPLICATE_OBJECT` (400) errors on Pinata when +re-running on already-uploaded content. 
+ ## Content Link Manifest `Testing/Data/content-links.manifest` is a plain-text index of every CID the diff --git a/Utilities/Maintenance/ExternalDataUpload/SKILL.md b/Utilities/Maintenance/ExternalDataUpload/SKILL.md index ad170a7e643..77c1d8ed0b6 100644 --- a/Utilities/Maintenance/ExternalDataUpload/SKILL.md +++ b/Utilities/Maintenance/ExternalDataUpload/SKILL.md @@ -84,6 +84,10 @@ Useful options: - `--hash-only` — only touch `.md5` / `.shaNNN` links, leave `.cid` alone - `--cid-only` — only re-hash existing `.cid` links under the new profile - `--testing-data-repo ` — forwarded to `ipfs-upload.sh` +- `--background` — forwarded to `ipfs-upload.sh`; submit remote pins + asynchronously instead of waiting for each to reach `pinned`. Use for + batch runs where synchronous pinning would take minutes per file. + Verify final state afterwards with `ipfs pin remote ls`. The normalize script fetches bytes through the gateway templates in `CMake/ITKExternalData.cmake` (same order as the build), verifies them diff --git a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh index bd01128eb36..93187d735f7 100755 --- a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +++ b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh @@ -19,6 +19,12 @@ set -euo pipefail # Options: # --testing-data-repo Forwarded to ipfs-upload.sh. Local # ITKTestingData clone to mirror bytes into. +# --background Forwarded to ipfs-upload.sh. Submit remote +# pin requests asynchronously; useful for +# batch runs where waiting for each pin to +# reach 'pinned' status (minutes per file) +# is impractical. Verify final pin state +# afterwards with `ipfs pin remote ls`. # --dry-run List what would change without modifying. # --hash-only Process only .md5 / .shaNNN links # (leave existing .cid links alone). 
@@ -50,6 +56,7 @@ show_help() { # --------------------------------------------------------------------------- TESTING_DATA_REPO="" +BACKGROUND=false DRY_RUN=false HASH_ONLY=false CID_ONLY=false @@ -61,6 +68,7 @@ while [[ $# -gt 0 ]]; do --dry-run) DRY_RUN=true; shift ;; --hash-only) HASH_ONLY=true; shift ;; --cid-only) CID_ONLY=true; shift ;; + --background) BACKGROUND=true; shift ;; --testing-data-repo) TESTING_DATA_REPO="${2:?--testing-data-repo requires a path}" shift 2 @@ -352,6 +360,9 @@ UPLOAD_ARGS=() if [[ -n "$TESTING_DATA_REPO" ]]; then UPLOAD_ARGS+=(--testing-data-repo "$TESTING_DATA_REPO") fi +if $BACKGROUND; then + UPLOAD_ARGS+=(--background) +fi FAIL=0 diff --git a/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh b/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh index e6a76f86953..621464be36c 100755 --- a/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +++ b/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh @@ -7,7 +7,7 @@ set -euo pipefail # local ITKTestingData checkout at CID/. # # Usage: -# ipfs-upload.sh [--testing-data-repo ] +# ipfs-upload.sh [--testing-data-repo ] [--background] # # Options: # --testing-data-repo Path to a local clone of @@ -16,6 +16,15 @@ set -euo pipefail # /CID/ and `git add`ed there. # Skipped with a warning for files > 50 MB, # which GitHub rejects. +# --background Submit remote pin requests asynchronously +# (pins queue at itk-pinata / itk-filebase and +# the script returns without waiting). Useful +# for batch workflows. Default is synchronous, +# which blocks until each remote reports +# 'pinned' — safer for one-off uploads because +# failures surface immediately, but can take +# minutes per file as the remote fetches the +# content. 
# # Prerequisites: # - Kubo (go-ipfs) installed and `ipfs` on PATH @@ -40,6 +49,7 @@ GITHUB_FILE_LIMIT_BYTES=$((50 * 1024 * 1024)) # --------------------------------------------------------------------------- TESTING_DATA_REPO="" +BACKGROUND="" FILE="" while [[ $# -gt 0 ]]; do @@ -52,6 +62,10 @@ while [[ $# -gt 0 ]]; do TESTING_DATA_REPO="${1#--testing-data-repo=}" shift ;; + --background) + BACKGROUND="--background" + shift + ;; -h|--help) sed -n '3,/^$/{ s/^# \?//; p }' "$0" exit 0 @@ -188,8 +202,22 @@ ipfs pin add "$CID" >/dev/null FAILED_PINS=() for svc in "${REQUIRED_SERVICES[@]}"; do - echo "==> Pinning on ${svc}..." - if ipfs pin remote add --service="$svc" --name="$PIN_NAME" "$CID" 2>&1; then + # Skip services where this CID is already queued/pinning/pinned — + # Pinata rejects duplicate `pin remote add` calls with + # DUPLICATE_OBJECT (400), and resubmitting on Filebase just makes a + # second queue entry. + if ipfs pin remote ls --service="$svc" --cid="$CID" \ + --status=queued,pinning,pinned 2>/dev/null | grep -q .; then + echo "==> Already pinned (or in flight) on ${svc}; skipping" + continue + fi + + if [[ -n "$BACKGROUND" ]]; then + echo "==> Queueing pin on ${svc} (background)..." + else + echo "==> Pinning on ${svc}..." + fi + if ipfs pin remote add --service="$svc" --name="$PIN_NAME" $BACKGROUND "$CID" 2>&1; then echo " OK: ${svc}" else echo " FAILED: ${svc}" >&2 @@ -199,11 +227,11 @@ done if [[ ${#FAILED_PINS[@]} -gt 0 ]]; then echo "" >&2 - echo "ERROR: Remote pin failed for: ${FAILED_PINS[*]}" >&2 + echo "ERROR: Remote pin submission failed for: ${FAILED_PINS[*]}" >&2 echo " The original file has NOT been modified." 
>&2 echo " Fix the issue and retry, or pin manually:" >&2 for failed_svc in "${FAILED_PINS[@]}"; do - echo " ipfs pin remote add --service=${failed_svc} --name=\"${PIN_NAME}\" ${CID}" >&2 + echo " ipfs pin remote add --service=${failed_svc} --name=\"${PIN_NAME}\" ${BACKGROUND} ${CID}" >&2 done exit 1 fi From c168ca6e19598af70b94eca48434248809cd5598 Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Thu, 23 Apr 2026 13:27:52 -0400 Subject: [PATCH 3/7] STYLE: Normalize AnisotropicDiffusionLBR test data to CID content links MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Convert the 24 `.md5` content links in Modules/Filtering/AnisotropicDiffusionLBR/test/{Input,Baseline}/ to `.cid` links under the UnixFS v1 2025 profile, produced by `Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh --hash-only --background`. Bytes were fetched through the gateway templates in CMake/ITKExternalData.cmake, verified against each declared MD5 hash, and re-uploaded; all new CIDs are pinned locally plus on `itk-pinata` and `itk-filebase`. Record the 24 new CIDs in Testing/Data/content-links.manifest along with two additional entries picked up as a `--cid-only` sampling run (CurvatureAnisotropicDiffusionImageFilter.2.png and warp3D.nii.gz), both of which re-hashed to identical CIDs — confirming existing `.cid` links in the tree are already compatible with the 2025 profile. No test semantics change: `CMake/ITKExternalData.cmake` resolves `DATA{...}` references by whichever `.md5` / `.sha256` / `.cid` link sits next to the referenced path, so the filter tests continue to fetch the same bytes. 
--- .../test/Baseline/Cos3D_cCED.vtk.cid | 1 + .../test/Baseline/Cos3D_cCED.vtk.md5 | 1 - .../test/Baseline/FingerPrint_I_20.png.cid | 1 + .../test/Baseline/FingerPrint_I_20.png.md5 | 1 - .../test/Baseline/FingerPrint_cCED_20.png.cid | 1 + .../test/Baseline/FingerPrint_cCED_20.png.md5 | 1 - .../test/Baseline/FingerPrint_cEED_20.png.cid | 1 + .../test/Baseline/FingerPrint_cEED_20.png.md5 | 1 - .../test/Baseline/Lena_Detail_I_2.png.cid | 1 + .../test/Baseline/Lena_Detail_I_2.png.md5 | 1 - .../test/Baseline/Lena_Detail_cCED_2.png.cid | 1 + .../test/Baseline/Lena_Detail_cCED_2.png.md5 | 1 - .../test/Baseline/Lena_Detail_cEED_2.png.cid | 1 + .../test/Baseline/Lena_Detail_cEED_2.png.md5 | 1 - .../test/Baseline/Oscillations1_CED.png.cid | 1 + .../test/Baseline/Oscillations1_CED.png.md5 | 1 - .../test/Baseline/Oscillations1_cCED.png.cid | 1 + .../test/Baseline/Oscillations1_cCED.png.md5 | 1 - .../test/Baseline/PacMan_I.png.cid | 1 + .../test/Baseline/PacMan_I.png.md5 | 1 - .../test/Baseline/PacMan_cCED.png.cid | 1 + .../test/Baseline/PacMan_cCED.png.md5 | 1 - .../test/Baseline/PacMan_cEED.png.cid | 1 + .../test/Baseline/PacMan_cEED.png.md5 | 1 - .../test/Baseline/Triangle_EED.png.cid | 1 + .../test/Baseline/Triangle_EED.png.md5 | 1 - .../test/Baseline/Triangle_cEED.png.cid | 1 + .../test/Baseline/Triangle_cEED.png.md5 | 1 - .../Baseline/VectorField_Circle_cEED.vtk.cid | 1 + .../Baseline/VectorField_Circle_cEED.vtk.md5 | 1 - .../test/Baseline/mrbrain_cEED.vtk.cid | 1 + .../test/Baseline/mrbrain_cEED.vtk.md5 | 1 - .../test/Input/Cos3D_Noisy.vtk.cid | 1 + .../test/Input/Cos3D_Noisy.vtk.md5 | 1 - .../test/Input/FingerPrint.png.cid | 1 + .../test/Input/FingerPrint.png.md5 | 1 - .../test/Input/Lena_Detail.png.cid | 1 + .../test/Input/Lena_Detail.png.md5 | 1 - .../test/Input/Oscillations_Noisy1.png.cid | 1 + .../test/Input/Oscillations_Noisy1.png.md5 | 1 - .../test/Input/PacMan.png.cid | 1 + .../test/Input/PacMan.png.md5 | 1 - .../test/Input/Triangle.png.cid | 1 + 
.../test/Input/Triangle.png.md5 | 1 - .../Input/VectorField_CircleOpposites.vtk.cid | 1 + .../Input/VectorField_CircleOpposites.vtk.md5 | 1 - .../test/Input/mrbrain_noisy.vtk.cid | 1 + .../test/Input/mrbrain_noisy.vtk.md5 | 1 - Testing/Data/content-links.manifest | 26 +++++++++++++++++++ 49 files changed, 50 insertions(+), 24 deletions(-) create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.md5 create mode 100644 
Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.md5 create mode 100644 
Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.md5 create mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.cid delete mode 100644 Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.md5 diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.cid new file mode 100644 index 00000000000..e493e31b03b --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.cid @@ -0,0 +1 @@ +bafkreifmtmpjuppizngftzcnt3ilufa66dajy3i6xogn3jfirveqw63cwu diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.md5 deleted file mode 100644 index 3df854dded2..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk.md5 +++ /dev/null @@ -1 +0,0 @@ -df95fdb0657f7f8472bdc16c73c5bed0 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.cid 
b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.cid new file mode 100644 index 00000000000..45edb5677d4 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.cid @@ -0,0 +1 @@ +bafkreif4yuyueovggfvnjj3qrnct54nrm52pfconktlvavzq7kt64jo3ji diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.md5 deleted file mode 100644 index 3b5dbe8354c..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png.md5 +++ /dev/null @@ -1 +0,0 @@ -e4e5e233b434ea4c85059d7c62f15554 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.cid new file mode 100644 index 00000000000..e3ec6f7a548 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.cid @@ -0,0 +1 @@ +bafkreicjpqxemmg3lgvigudscqqaiy3mjrgm3tm3ep7ngskw3is3fix2qi diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.md5 deleted file mode 100644 index 750c29db86b..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png.md5 +++ /dev/null @@ -1 +0,0 @@ -07435f1d44aeb66fd98e642945437662 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.cid new file mode 100644 index 00000000000..fae7f8f2427 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.cid @@ -0,0 +1 @@ +bafkreigxse36jyrc4cs6w77vi6qfd223dhnhrf6v2yuipxcl2wq2h2xjky diff --git 
a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.md5 deleted file mode 100644 index 76c2e516e80..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png.md5 +++ /dev/null @@ -1 +0,0 @@ -5e752e3fa0e46bb530fedc94c7794c73 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.cid new file mode 100644 index 00000000000..a82e5548b05 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.cid @@ -0,0 +1 @@ +bafkreigh7wfs6kgfhqwxcnbt22c4panshoarkda5tdmykstolnwzvkw26m diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.md5 deleted file mode 100644 index b80ed7eb732..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png.md5 +++ /dev/null @@ -1 +0,0 @@ -ff36663855e6794712b081689aac70e5 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.cid new file mode 100644 index 00000000000..9a9b0f36397 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.cid @@ -0,0 +1 @@ +bafkreicbrlzmtr2t3a22hgudfnbicgmvikg2hioxzpzxvw5ncwzmdlwpeq diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.md5 deleted file mode 100644 index f6a2c0d79ae..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png.md5 +++ /dev/null @@ -1 +0,0 @@ 
-b741b80ce65e20c59f286244f621344a \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.cid new file mode 100644 index 00000000000..7b2b845692d --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.cid @@ -0,0 +1 @@ +bafkreigcrzclz5tri2yhsu63lax4kxabjna652kayppzqp7u3hilwgadte diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.md5 deleted file mode 100644 index d529c7f1aa4..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png.md5 +++ /dev/null @@ -1 +0,0 @@ -c606fea9c82019d1b4e80d351b803d92 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.cid new file mode 100644 index 00000000000..bdd66c9283b --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.cid @@ -0,0 +1 @@ +bafkreibpqafrxnmo2m2gixrkml6g54hvzgiyiyaafxug7zyameeulrpiua diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.md5 deleted file mode 100644 index b5cb7d21602..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png.md5 +++ /dev/null @@ -1 +0,0 @@ -50da37ff706c93536c0f33390da4287c \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.cid new file mode 100644 index 00000000000..4404e61996e --- /dev/null +++ 
b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.cid @@ -0,0 +1 @@ +bafkreihnab2o426g2ffmwj42a4wlrusk5fzev3jwxxjmgw3z4hgq3rosmu diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.md5 deleted file mode 100644 index 35dfbe815ba..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png.md5 +++ /dev/null @@ -1 +0,0 @@ -21e83dc09f4c58a44eeb676e49ec3d99 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.cid new file mode 100644 index 00000000000..54757b866fd --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.cid @@ -0,0 +1 @@ +bafkreiafl54ccpviaq4nm7vufz3wfceoxl5y2bosbvtkyb45btihrgt6ae diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.md5 deleted file mode 100644 index 6a1cf5d0156..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png.md5 +++ /dev/null @@ -1 +0,0 @@ -c5b358267defea8babcfebbc66c9fa8b \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.cid new file mode 100644 index 00000000000..75fd56c7753 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.cid @@ -0,0 +1 @@ +bafkreib2z57ja7aqbz4ddzpprpgxwj3vojaeaijbmeddnbdon65y52536u diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.md5 deleted file mode 100644 index 223ef256a52..00000000000 --- 
a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png.md5 +++ /dev/null @@ -1 +0,0 @@ -fd8d652016508d93ee861c1db83f3ed4 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.cid new file mode 100644 index 00000000000..4589307d9e9 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.cid @@ -0,0 +1 @@ +bafkreihod4nsri7yzrd7h354snfk24xenhwaqy4rpt356vqcuqupwrcu3a diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.md5 deleted file mode 100644 index a08c69480fd..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png.md5 +++ /dev/null @@ -1 +0,0 @@ -edf293e2cce2eae1df4f8598e8179641 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.cid new file mode 100644 index 00000000000..224854e8b33 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.cid @@ -0,0 +1 @@ +bafkreiaslkugrrcu3wvgfvhyqyt4p4voolthjbrjlwwyhvppsmdb4sb2au diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.md5 deleted file mode 100644 index 09f2a9ab526..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png.md5 +++ /dev/null @@ -1 +0,0 @@ -f823b62e9135a37c7438fa07a9e54096 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.cid new file mode 100644 index 00000000000..5098a3e9e4a --- /dev/null +++ 
b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.cid @@ -0,0 +1 @@ +bafkreicwjq4f2xajppwq3kye2cc2mnkvn5dfqlc7uoexm7qbftya6sxbcm diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.md5 deleted file mode 100644 index ff3bfe4ac73..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png.md5 +++ /dev/null @@ -1 +0,0 @@ -12473a0cb8d3afa0f8d7eb4f61e6216b \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.cid new file mode 100644 index 00000000000..24ac0cfdf01 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.cid @@ -0,0 +1 @@ +bafkreib2k7buke3tmpvugklmy56q2a466thyegmzn5jdhz4y4jm5bhwuiy diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.md5 deleted file mode 100644 index 77458bed992..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk.md5 +++ /dev/null @@ -1 +0,0 @@ -23c4495de1a746648418abc144972e92 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.cid new file mode 100644 index 00000000000..a741f6bcf1a --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.cid @@ -0,0 +1 @@ +bafybeiffbnw2lggwcdgjuanqhhmur7ntoc7f5wgypbm5cjab2bkjtkjnvy diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.md5 deleted file mode 100644 
index 9e91116b83f..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk.md5 +++ /dev/null @@ -1 +0,0 @@ -db41c262fba84a75eb399e1e154a5974 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.cid new file mode 100644 index 00000000000..5030d3f4b37 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.cid @@ -0,0 +1 @@ +bafkreiew5kj3pus2c3cis7t57cfiiekkzdhfgt44ygfoafsaebk2kvxike diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.md5 deleted file mode 100644 index c48093eec6b..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk.md5 +++ /dev/null @@ -1 +0,0 @@ -3a7d9131a732794fcb4100909cd3fd1c \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.cid new file mode 100644 index 00000000000..5cbf09ebca5 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.cid @@ -0,0 +1 @@ +bafkreig2pwvdimswvimhz43bmhrrcozyf233wn7txir5hx6jtykws5qzvm diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.md5 deleted file mode 100644 index 2992b7a556c..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png.md5 +++ /dev/null @@ -1 +0,0 @@ -ed7342b4598d44574b2714834b705cad \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.cid new file mode 100644 index 00000000000..0cd60fda78f --- /dev/null +++ 
b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.cid @@ -0,0 +1 @@ +bafkreic5pnb5dbbpbo6fgjk3atkjzdafuqbtzpfxeq7dfc724zsjnggvcu diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.md5 deleted file mode 100644 index 2d8222d7362..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png.md5 +++ /dev/null @@ -1 +0,0 @@ -45a1845c6fa452c7465bebda5bbe9b0f \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.cid new file mode 100644 index 00000000000..80613d5c325 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.cid @@ -0,0 +1 @@ +bafkreiae437zhanhbgmn2oxy2xyzee3ux5s2eyek6hsgk5bzd7lhwz6yzm diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.md5 deleted file mode 100644 index 4797a352a96..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png.md5 +++ /dev/null @@ -1 +0,0 @@ -75818e9d765fb6838a8cf5845ac19b9a \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.cid new file mode 100644 index 00000000000..8569b10850a --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.cid @@ -0,0 +1 @@ +bafkreibhkebc4kkb5ysuelc2kfllixc2xq3looqfkvrq6f3qx5a42cvm5q diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.md5 deleted file mode 100644 index 0abec1e6758..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png.md5 
+++ /dev/null @@ -1 +0,0 @@ -d7955368c6f49cbb451d8901aa40add6 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.cid new file mode 100644 index 00000000000..5881f9b832c --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.cid @@ -0,0 +1 @@ +bafkreicey264ntq4ew4wnlyoy23cu5k3cmxwvlm7fg4r5skcbtwhsh5jfe diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.md5 deleted file mode 100644 index c5594b80c04..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png.md5 +++ /dev/null @@ -1 +0,0 @@ -bce40d3af4f491d728aaba8bb8c9ede9 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.cid new file mode 100644 index 00000000000..7ff05bad9e0 --- /dev/null +++ b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.cid @@ -0,0 +1 @@ +bafkreics5ulkrwki4epnu6l4umam3on7ovr5ao4yfva7f2odkkp25j26gm diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.md5 deleted file mode 100644 index e664c2ad407..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk.md5 +++ /dev/null @@ -1 +0,0 @@ -0a9e85b2b8dfadb4ab25b828a0f23852 \ No newline at end of file diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.cid b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.cid new file mode 100644 index 00000000000..ebd35daafb0 --- /dev/null +++ 
b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.cid @@ -0,0 +1 @@ +bafybeid6ongwkdpv3manmr4qpu22zybq4frslcqj3ysmkxc5tmps2aqnfu diff --git a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.md5 b/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.md5 deleted file mode 100644 index 2e105fba437..00000000000 --- a/Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk.md5 +++ /dev/null @@ -1 +0,0 @@ -ff88f04e75dc478b283ecdf39d8d7687 \ No newline at end of file diff --git a/Testing/Data/content-links.manifest b/Testing/Data/content-links.manifest index d9d6fca9994..10e3f127b37 100644 --- a/Testing/Data/content-links.manifest +++ b/Testing/Data/content-links.manifest @@ -12,3 +12,29 @@ # Paths must not contain whitespace (the manifest uses a single space as # the field delimiter). Data lines are kept sorted by path; comment lines # above the first data line are preserved on re-write. +bafkreifmtmpjuppizngftzcnt3ilufa66dajy3i6xogn3jfirveqw63cwu Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Cos3D_cCED.vtk +bafkreif4yuyueovggfvnjj3qrnct54nrm52pfconktlvavzq7kt64jo3ji Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_I_20.png +bafkreicjpqxemmg3lgvigudscqqaiy3mjrgm3tm3ep7ngskw3is3fix2qi Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cCED_20.png +bafkreigxse36jyrc4cs6w77vi6qfd223dhnhrf6v2yuipxcl2wq2h2xjky Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/FingerPrint_cEED_20.png +bafkreigh7wfs6kgfhqwxcnbt22c4panshoarkda5tdmykstolnwzvkw26m Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_I_2.png +bafkreicbrlzmtr2t3a22hgudfnbicgmvikg2hioxzpzxvw5ncwzmdlwpeq Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cCED_2.png +bafkreigcrzclz5tri2yhsu63lax4kxabjna652kayppzqp7u3hilwgadte Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Lena_Detail_cEED_2.png 
+bafkreibpqafrxnmo2m2gixrkml6g54hvzgiyiyaafxug7zyameeulrpiua Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_CED.png +bafkreihnab2o426g2ffmwj42a4wlrusk5fzev3jwxxjmgw3z4hgq3rosmu Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Oscillations1_cCED.png +bafkreiafl54ccpviaq4nm7vufz3wfceoxl5y2bosbvtkyb45btihrgt6ae Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_I.png +bafkreib2z57ja7aqbz4ddzpprpgxwj3vojaeaijbmeddnbdon65y52536u Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cCED.png +bafkreihod4nsri7yzrd7h354snfk24xenhwaqy4rpt356vqcuqupwrcu3a Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/PacMan_cEED.png +bafkreiaslkugrrcu3wvgfvhyqyt4p4voolthjbrjlwwyhvppsmdb4sb2au Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_EED.png +bafkreicwjq4f2xajppwq3kye2cc2mnkvn5dfqlc7uoexm7qbftya6sxbcm Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/Triangle_cEED.png +bafkreib2k7buke3tmpvugklmy56q2a466thyegmzn5jdhz4y4jm5bhwuiy Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/VectorField_Circle_cEED.vtk +bafybeiffbnw2lggwcdgjuanqhhmur7ntoc7f5wgypbm5cjab2bkjtkjnvy Modules/Filtering/AnisotropicDiffusionLBR/test/Baseline/mrbrain_cEED.vtk +bafkreiew5kj3pus2c3cis7t57cfiiekkzdhfgt44ygfoafsaebk2kvxike Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Cos3D_Noisy.vtk +bafkreig2pwvdimswvimhz43bmhrrcozyf233wn7txir5hx6jtykws5qzvm Modules/Filtering/AnisotropicDiffusionLBR/test/Input/FingerPrint.png +bafkreic5pnb5dbbpbo6fgjk3atkjzdafuqbtzpfxeq7dfc724zsjnggvcu Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Lena_Detail.png +bafkreiae437zhanhbgmn2oxy2xyzee3ux5s2eyek6hsgk5bzd7lhwz6yzm Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Oscillations_Noisy1.png +bafkreibhkebc4kkb5ysuelc2kfllixc2xq3looqfkvrq6f3qx5a42cvm5q Modules/Filtering/AnisotropicDiffusionLBR/test/Input/PacMan.png +bafkreicey264ntq4ew4wnlyoy23cu5k3cmxwvlm7fg4r5skcbtwhsh5jfe 
Modules/Filtering/AnisotropicDiffusionLBR/test/Input/Triangle.png +bafkreics5ulkrwki4epnu6l4umam3on7ovr5ao4yfva7f2odkkp25j26gm Modules/Filtering/AnisotropicDiffusionLBR/test/Input/VectorField_CircleOpposites.vtk +bafybeid6ongwkdpv3manmr4qpu22zybq4frslcqj3ysmkxc5tmps2aqnfu Modules/Filtering/AnisotropicDiffusionLBR/test/Input/mrbrain_noisy.vtk +bafkreia52ajz3mxwv5rusp33a6mcl7mphp772zkzqthc2xwlb7rmn6fsyy Testing/Data/Baseline/Filtering/CurvatureAnisotropicDiffusionImageFilter.2.png +bafybeidgydpaoeu6qv4jupn3apal7ri47zr2q2qar435d3l4mdri66opby Wrapping/images/warp3D.nii.gz From 3d8857e395caca60472478ab55469abd20ef6d53 Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Thu, 23 Apr 2026 13:33:24 -0400 Subject: [PATCH 4/7] ENH: macOS hash-tool fallback + trim pre-check to md5/sha512 MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit In content-link-normalize.sh, the prerequisite warning pre-check was iterating every sha variant (sha1/224/256/384/512) and requiring GNU coreutils `*sum` binaries. Two issues: 1. ITK content links in practice are only .md5 (legacy) and .sha512 (current), so warning about missing sha224/sha384 tools was noise. Narrow the pre-check to md5 and sha512. 2. macOS ships BSD `md5` and `shasum`, not coreutils `md5sum` / `sha512sum`. Warning on their absence was a false positive for macOS contributors, and the verification path invoked them by name ("$tool" "$file") so it would actually fail. Replace `hash_tool_for_ext` (name-only) with `hash_cmd_for_ext` that returns a full command line — preferring GNU `md5sum` / `shaNsum` when present, falling back to `md5 -r` (BSD md5 with md5sum-compatible output) and `shasum -a NNN` (BSD shasum). `verify_bytes` uses intentional word-splitting so the multi-word fallback (e.g. "shasum -a 256") expands to distinct argv entries. 
Addresses review at https://github.com/InsightSoftwareConsortium/ITK/pull/6111/files#r3132434963 --- .../content-link-normalize.sh | 66 +++++++++++++------ 1 file changed, 47 insertions(+), 19 deletions(-) diff --git a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh index 93187d735f7..0b3a3fbaa9a 100755 --- a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +++ b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh @@ -102,9 +102,46 @@ fi command -v curl >/dev/null 2>&1 || die "curl is required" command -v ipfs >/dev/null 2>&1 || die "ipfs is required (for CID recomputation)" -# Hash tools are only needed for links we actually encounter, but fail fast. -for tool in md5sum sha1sum sha224sum sha256sum sha384sum sha512sum; do - command -v "$tool" >/dev/null 2>&1 || warn "$tool not found; any matching content links will fail to verify" +# Resolve the local command that computes a digest for an algorithm. +# Returns a command line (possibly multi-word, e.g. "shasum -a 256") whose +# first whitespace-delimited token on stdout is the hex digest. +# +# Prefers GNU coreutils (`md5sum`, `shaNNNsum`) when present; falls back to +# the BSD/macOS tools that ship by default on macOS: `md5 -r` (output format +# matches md5sum) and `shasum -a NNN`. On macOS you can install coreutils +# via `brew install coreutils` to get the `*sum` variants as well. +hash_cmd_for_ext() { + case "$1" in + md5) + if command -v md5sum >/dev/null 2>&1; then + echo "md5sum" + elif command -v md5 >/dev/null 2>&1; then + # BSD md5; -r prints ` ` like md5sum. 
+ echo "md5 -r" + else + return 1 + fi + ;; + sha1|sha224|sha256|sha384|sha512) + if command -v "${1}sum" >/dev/null 2>&1; then + echo "${1}sum" + elif command -v shasum >/dev/null 2>&1; then + echo "shasum -a ${1#sha}" + else + return 1 + fi + ;; + *) + return 1 + ;; + esac +} + +# ITK content links in practice use `.md5` (legacy) or `.sha512` (current); +# other sha variants are supported for completeness but not pre-checked. +for alg in md5 sha512; do + hash_cmd_for_ext "$alg" >/dev/null 2>&1 \ + || warn "no tool available to compute ${alg}; any .${alg} content links will fail to verify" done # --------------------------------------------------------------------------- @@ -195,18 +232,6 @@ algo_uc_for_ext() { esac } -hash_tool_for_ext() { - case "$1" in - md5) echo "md5sum" ;; - sha1) echo "sha1sum" ;; - sha224) echo "sha224sum" ;; - sha256) echo "sha256sum" ;; - sha384) echo "sha384sum" ;; - sha512) echo "sha512sum" ;; - *) return 1 ;; - esac -} - # Substitute %(algo) / %(hash) in a URL template. render_url() { local template="$1" algo="$2" hash="$3" @@ -301,10 +326,13 @@ verify_bytes() { return 1 fi - local tool actual - tool="$(hash_tool_for_ext "$ext")" || return 1 - command -v "$tool" >/dev/null 2>&1 || return 1 - actual="$("$tool" "$file" | awk '{print $1}')" + local cmd actual + cmd="$(hash_cmd_for_ext "$ext")" || return 1 + # Word-splitting is intentional — a fallback command like "shasum -a 256" + # expands to multiple argv entries, while the coreutils "md5sum" stays + # as a single argv entry. 
+ # shellcheck disable=SC2086 + actual="$($cmd "$file" | awk '{print $1}')" [[ "${actual,,}" == "${expected,,}" ]] } From 65f7c84b851eb516b7aa76571bb5590d4a0ba866 Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Thu, 23 Apr 2026 13:44:06 -0400 Subject: [PATCH 5/7] DOC: Update binary-data contributor guides for ExternalDataUpload skill Rewrite Documentation/docs/contributing/upload_binary_data.md and data.md to describe the new Kubo + pinning-service workflow driven by Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh, replacing the obsolete web3.storage / w3cli and content-link-upload.itk.org instructions. Document the one-time Kubo + itk-pinata / itk-filebase setup, the upload script's behavior (CIDv1 under the UnixFS v1 2025 profile, synchronous vs. --background pinning, manifest update), the optional --testing-data-repo mirror step with the 50 MB GitHub limit, and the content-link-normalize.sh conversion workflow for legacy .md5 / .sha256 / .sha512 links. Refresh the storage-location list and testing-data figure caption to match the gateways enumerated in CMake/ITKExternalData.cmake, and remove the now-orphaned content-link-upload.png screenshot of the retired web app. 
--- .../docs/contributing/content-link-upload.png | Bin 73800 -> 0 bytes Documentation/docs/contributing/data.md | 33 ++- .../docs/contributing/upload_binary_data.md | 190 +++++++++++------- 3 files changed, 143 insertions(+), 80 deletions(-) delete mode 100644 Documentation/docs/contributing/content-link-upload.png diff --git a/Documentation/docs/contributing/content-link-upload.png b/Documentation/docs/contributing/content-link-upload.png deleted file mode 100644 index 32df15aa48602f9a545d38a3375405c53ad622b4..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 73800 zcmeGDWmH>l^e&2~N-3pii$ifJ?oJ8>w*tkj6f5p7X=#Dr4#h%oclT0?1}W~OxCD34 zS^V~Yk8{SjpYFJKjQ!=jAF{Hdb4`8bGv5{RUPTuBIq7o{2!t&!2T=!s9vOf@59^*i z1YVioc-{#dFdgM|T|gj;k021a(u2~^ZMz`Q8<0Fi;)7?}&VrZkhto#z!Nx$^Kyn`) z&kO?@Cw26@_{Z^oVE8$FU$hhTqKhA%>isGT^wEEJO;G0jp!Idht4cOPpRQ@W4sSeL z-IgccUP)>`dqyYuk@G928Clf;U0fQEOCpkr5rJE!lqmlWyiWwBvm81~kBjy_@=fZT zPmEE9KnP#Gs=Uqy696KDe9u2WZGLe7jPnBb`F~F`aqmz6*U6Xv_wfIP3SiS6_8`za zFELGoR`$qe-WQ#m10Mx2+R_Q7V_r!on(M|UblAW6NAbuh4&Rup9jT87TdeE?Y6hcjIpDbY&wo8dp+DEUjW`_Q%h7ze|EP ze`pnx#6y@^`W5$~TSabsX<`Bg+QDS6WojQjCcXRi;2j2W5J`!VsZRnbT~$TI#-P@C z9YMMOv*v-lk_2m*)jsI!w@gXU->(vdc=$20@!Adu0?3xSTXt<(_aXG!)IvF*Oa!CA z#gtw?)?7EyoUfKC^fza?RPUJ|0~ShSO~ zqoV^44=*MrhKq~qby$}zKkD@A=g*%VejFuQs8?MQwxD12#DL;zdT_B7!0D2%^ICUN z0qXK<3f80qHM7Ny89}NoibNz_2N7F|Sxi|>&jbFBkJHJ=&h6UU+O9s47{h;Ft5-T> zJGtdb-b3=@Y2?Z@S>E5ZQ*Rq<>oS8zbpr$2>KSVrn{s4+Uf%lA8Um8Tr11XzdtYB) zXXmO3>xi&0o@6D`!GEs2R<)#Rbm9C@o;?PX`dO7nj<%lEYk%@l-~_uw_U9cw?52Z? 
z&>I3bWAX^NKN4G$HQrno_xsiT@=ijmxi_2Z?m4oFB_VPrv81G=zJ2=!JW5GQzIfXC zk^U`>I$>@}Nl8J$=FR1C5{L1bck}*FOJW&9>E;Fd`ngl@+XA)h*w|Q^C`yXgulbos zw6iBZ29kbx_m1iDm$RqzRZP#<(i?AvvsftACi_#uUJ{A?{-gcD@b)BH$W)YwK%s+-%!mE_qzim-WW#uU+-qiMgfYxT^uU2 z;(ZnfdhiwS&0VXf**9aqe9p+P*VPl^RZ3RcOW=M7JZgJ;dwO~r z279&(id<-MUhDt*2qQEs%nQE1Qs@Z>yQ~kaA(9aN{d-L)_PD-r4z`oApT5wCzC>>b zAHMT>WZa=nD139eHXr9+&P0&Sq~}mNmpxLcCqp|qu65_;@Okz}2Zb(^g1WSHR{|nw zV*95|HYOHUSXdYqHn!%5k*k&wU+UntZ)9Fx-qF!fN(yOIkA}Yf93a#5G%*<&!pYz# z83)2yO>G2;zB>n;|IXVijTPbM=9b}imfX4YL&(KKM@OeUZIVi$w?XNP0~O#}#J*(= zOVt6RSZgXYL4-kaV@z_S{BoUE^hT~bm+jPOepMnoah)zn_yz)jC^u-Nk;m`k%*@O@ zJd^?~6%f#rA($1(%uD>@>9-DB{;8=cA|fKdhu|B#?#*=ahlhZL#T_G(=n8-S{I?PX z-gD%arwYmPygIQ;LY!`9M+F51on2o(`1r;o9T3K>DrkJYdjfcC$l{dyg()X7kW>Cz(af*ufBCk@7DQ1W|-sn--eGDFokxN+kf!p zGpU=RlP@YPbP@#q{sktcyqp{{w`+lLR|AiI{q<25)+&l}wB_t#yBO`nDYGUb|5ur3-=I!Sk zu|O^?5nuQYGt)f-Ug+7~bQKQk!l$C5A|#|kI0Hu0)YJ_2Ig4aP_?+!9v$OAQeFc08 zdA)dU-+DR;9}0gheYn~em#>n(-Q)29^av9`3Qyy96?TJVhm6}7So$+Ag4YUBU}LRT0O8R>i#YcaMKQA1fl zLCp;N_{4;pV1Kl^Zdg}rbo4LI@HfvA5)w{NPj4sgfW(p^>{gU7TNvUvnj;UsS;FZF zCm9>-BqJqN0dfyuU^wM7kif(Hz~PN$MYpF7&$o>0y|i7!%T?FT=Kr&Ltv)90(-jP* z_!7h%n#C@l3;{0iby#i~eF|!Od;AnDEOe--^;c-t`1m-0k_;O@78e$l6&IsQY<`%} zY;JBwN8?JX&D(n@D=Y6$#@hoqa%Zu%IX|D9G~#^GXa7$Kflwq+B%(x*4u|OVdq72{64+sbd3W6`tXO9Tl&DMkvFxEvfnCZIj%{O`={XC{RndV~X3L6@x zmmE(vw?U4y_qC&AX}y)h9&M?(Zj*sUuWR*>UMAmWYIpX7Z{<<8*tjYSr-z(cJP<|Q z-7-NY0~RhH%*>{u{{H>UYf&u(lx^;}{T}{79UtWs6hcEnw$3CRfujC8ED#20%+Sz~ z3SlccOa=CDkwwgZ4-Otq;X{=*?^)Ly7Ede!Ld7vCbGws-%>ka*=DLL+8VC}gxHskF z&@5l)qfst>X#_X#t`xk4>9j?gAjeJsYf@id-|v9RSJ&&`?E0xU#mnF7Pqv7ILPA26 z0TaTWc{DFjg5h1it88_C=w^?=vU-j6op$g}JNnbv(iet$V`%rA_uF!Ea_qI~fhwJx zoUDu+tW025HYdm85ih9O>tRX)NvR~x}h7#+vi?%^5emuC8ZK( z&-n6FI?=A_cX|#KlXfKK`GA`?8iZzXz90Y+m>rUJG4$@40(VWBz~$t13gmEijRs)BEPy)sAnNRZduKFGdbV!oa^DZqdJ^dz;t!A0S2>>Pl?34Ed1_l;JRij=bsrd4?}kC`mhl?28O#$S#W50_|5`-5H?wz=i&0f!9kfvaY@O}ImY>e zckePQa6pe_<5!_@wmI|!J@_mE)rwsk%g{~OkIUSDN}|quo8j?$f8gUS{M8(-wTGi+ 
zxkBu!tFRYbAX$l;n!0qZ^Y;LQMnM+S?pw#utOkQJ0Ehki_iu=yoHJaecz0~rOj_-+ z#HMg)C2ML-Os2Tqj7PIpv9gLvn?FcfCpP#7-t!0c>;PQk zbR;k9ZuhftaTQ9bNdE1aQXmt}+wnA6xa_TP6h(R8wW#-%-|e&{vSCtss%Q$u#==ru zR;D5+cYqyeNWSJM2(%WW^>pgq#0Iu^UVRtaHo<-q^V;ja~ux88MFt)3l9#%&tFc1GprS0=DnR+g&yCKY9b z7feo#QP75Wc2e6{B)<-3v%ty;e%-5WMFRm<3 zNPa~lqh=Pd2ew$aGme_aqsTMYsS6tkjzGr8x1vozq>2G=p8e)iPN3TY{7YI|+RaTx z-uk6{6>C_Qj;^j3knj8Z6_B$(SP==YURe&M6&947c(+>r)4+HsOdiQ&Ib_h}+P8w| zd+uPUkT*rGR6KE3)<3;(pOy&vlJTDT@s|hARSn$d@vY6RH4>zk>6Uq_6lD6|g&!>W zx%IT9!bdOlCWw&Jgy`OHx0LZEC& zs{uKYPTrcW5r_i!W*|lYclFXu_Fe~k2S^@8wF0&E^=_+KOrhICKCLYodPQcslNkwk zkCe-&OvOmQq+;ELKl<|ZA6{+-dPze*YY5^0T#C?+|K@fL+KR4g$O_wdg2RExS#y1X zaF2;?QJKmWK<<^h7ln<~_^)5bs^pVp!0js`1Zswc=~u+{SKiuEQjdC^`uVAL3cx4lb|O@Aj)jE< zl=gj)k>;h#k~Il2R#3jc&DZ2TdI0H#KC!;{L?FsAzplH=K` z30+|MpmnOK@hu=AT!yxjGc*h6dTLl^pH$katE)>&K19^9jpvViF43}4QW`iVVFDHd z13xu&hX+md)lOWdRPY6ZoZB+z@G*ptl5zwk_zGBfcq7fG*HZ^eog!rU03>1neEC_! z>{VV8_#%rQ&pjGT3VSC!{LhrXZ_>n{c*91TxFF&{xfsG^zTm0_w!49(q$KoOr@wAB zaFua=IU}D5r3(uS5B-eNX6N)ZG_zl|~kdYLv3`PdIY zRW>vnJodHOjHfHCt5d;!93ex1|7pjKi8pl+09>sibyUouqhkq`4?g}~g0g}h1N}PH zYPtwCH&M>&a^v_d^ZLhyEapNMOC+TYZn>673pb#d-?%X}@-$?49i&piLLNMY9fmGU z<%hoo{2bUiIe|o6JW-S&iELz`jM`Z43hV0Xs;%Xd$>x&Fr=>~6=1v`)n3w=w1YmC4 zHKNKuS63HMu%V$HC-Jz^!oPnXv$=AB-mRhG0_aIfzoA=<9Dq_y@ngE{3bw#a`~t{# z-{9S@!iEqCsJ#ggDNqsK|!L}SW7Q09urNUadZ)GtiQKvw2U90lgf|u0obSs8dRW(r+Cd={FeQ;JC|%aA;1QTrk}p4a)}V9C36(W9v9KaRtU zn1Pj*RZy^D%1NPkB9fRZt2!4KL+R6}PoAEFlGMKjpJab|@aBt|v=_RX$5OGlJmWor zDYUNuji~17e!-=!mFV>t$3=OHTPg>ZRY3fL6l|d)7ZEX$K^ggjm&{;lKZ2xl7~XOKyaUTMe2 ze}ZKT?vX12-@fVA-TGpcYLStXmrS81ZqrpPb#+PbfXn(G7<^uN44@AnJ^12U-pR7(-Vr$%Z`*gJ=a%lYgJqV!dfWRxlq4~Mq|wFq+5h@~l< zrem>Q(MZP*82`?#jL&h<(vvMG zJ|U?tCl%U_%(N;iR}qRzf2LBw?Z>VG02{z3KWJ$Q|3}SKu%bl9Rf{KpKXdJkruKiC zYD&xi*p;Ga)92anv37sI0XtDAClEA%%*PAV1bX#C zK!0h0YLAN>pnX+xa1>K*&PGN|Bh*b^bCjrE)rcS%a%0xeNd zaq+3QpMilvv#@a#gStU`^_ypFz%n<7nDLSV_zRcdP%-bR~d`@2DPv1YX{!(`U_{wa3TDLy05b!?Qv{cyBhOJRrI#mm#QQ 
zzgi9xz7$=oYhh)}$$XEG;>)NAVkgB9%Z6O_5hTL>2mv7!D&>M9kl60?v+RktQ>725 zGlQx(iq3Ya>NZE(+5-U;b#dVt)wAN>%)-J#5=6pps{@Ug0D=W*hqqT^fpQT9V9E9| zH-HVq@jv*MmX-!^i_o)kPf;qrlabRoKO&8tlqQ&mjLcIRf`{`t_k1$iOZqayvi#;) z$tC$cwfXqp18i{L+p)Au!ww{SQ(155AKcn-zIb!#vRB(LlQJ%{sy?E-IAUq=_d?~b ziBNxt1J?o?f;g*#%+1RSE(nqdb5Eeo%arg=_mjtslmOuk7`w8v5@3KRqw-r?(l)%@ zPLsv+>i-ufR#fy>0(ZQGgoI>fKMd<73HR#V^**xFGSmhz;n;aP#Sn`%haGDbxBlK;K6x4uc0M@U|2hd@Dy}zD!EgYsMm+U(iotA+_ zM{T#a>t6V4YLrUmj4*{uLP$T!-NImmG`d=8lAZM0GnGX=*9zeB)pS~#;O_h!Ye=7_ zE4(G^x9eR_%WL&-sl*J+Wg>;cfdUaS*)s3y=XOuM0Tu;?LJ38p+}PvI8#6PPQXSfy z1j)0Qta($GI)i(hL0RQ6LA(F4 zjD9$QRDSl?o#=GBPbYf2QEgC!BHDe5EYRU@Z65h#hL-0K)1O|*AdUb-5HS)8@Xw*z z_^6AkvB)fn4RD`_F*%RlSpy^dxxC6Uhe14PDnO}u!%X}nOm13&YdRK0sfMY5{x>LS z`}Ce^RwH~O;dtLk0Ynebe?XtAXV@EPYMu@XAPZDA0d|&FCL9J3mc3hCL{Q=4~9qk)V9|8yX5~+%U8tDZAdQy$i>6#8uWuE%ziBKl3f`Vs z=3a8s_Js$j{iusvVW<6mmBkd6MEds5dui-eJ90}SR(8mT?k3VT{dY7|UqJ97Og*?( zN>>fS?7RFp4}4|DN2(08Z=XLi4IgJQdAAznkLhY?(B89|FFFf`74lFPaZI5ACY(aq zeg6EODo|TfGxzngL5pKRL>gM!Yx5>2BArA{B3NNb$%$XQOFO_x0Mi73hv@~HRE(ob zdLYew{c_JipVAZdhaOELRiV)y{+>0LAzOtj4pS|fHMgIoGv%@*No98bYRZ}DVFfOj z!T{KVXAoTO>vqszxqa~S@4lt9QVYu{ez5>Cy?p#O*(_By6TDcvlW{ROEOh78r!f!v zMW<8Rj4cC=8X>*BI#q!7e!VjGE5Sd}@Wb{FA0?SpKMJ9(DAy5YG?CkD` z6jI-=Za&o-6--zI!xW=(pdMDS0+h0}8lAj)7>qQc`!hfUbochkx#$qj#g%RNQzM+^ z<@>aa{oJ<4nI$&=k?`DU$g)HNz4&~C6EN<1gbp_~I(TvL>OmXUeZO($ZO9yNwpI@n znOuF9;pcg?_OpCZq`rAtSK&F=;3JF>IkoIJW5p0T$hOtvK7Bpo%Y>ErzAHtd-}Ka7 z!Z&3;%RLcOo2T%%!K}}6qG{xv38#jPq3i7*tK66f#RoEQUw0G`evRjRzaIXHb4m zGWkrS9x#|IygLVr5+C2iXSIL|k{f0FtO$Lh({(L!@fM?VD~SDO1h$6G#QZgH#P>uY zmhPD(FzvXX+2m(-l(xx1SP>~o3~qPaJx;6$AQef3yx%S0`3Xz`+V8GV0RazzU3vDGN}_gl+s{UT%dd2#vMb_VHBFluS39BS)QxmvOP zGAI{BPX@8oP-zkpwIGc0i7`#jfO^qy7|x*Ny-m!gDM-FHd@i00X>9|#HPvMAi+^zs zvV&%U4h|T70Hk%gkV_um;HcB0K`mrV7Sq#j)cpMXe0-XN0mA9_UxIPmpJo0(L~+<= zFBhs>Lv1?{(*IiPh^z_M_(1$A=zI47Wy0;RisosoTs>3AgMwB61Q{OU!EV z(O63|Zxmvgmi6)8RNVhAOBPcm70J^iZNivm51xWvmp}ai{4YQ>+z*luE$Ln`xxIMQ zjrZ`yi~GNY3-3b+<+CS%fW+>c|BTZa=zuM)W(IIay4~A6phs8$XuZiTGlq5Cfj~%w 
z%@_Au7eK%_fd#O^_Xo-U1BXqjx|FT?mQrn_q~)qgCQnkd5H6y*~%#jST({p z!xcJ+u{;0*83Mc~Xp=-W;^%O+i4)|4Vz687C?L zjCY9BcjM<6uG?}9*oqA6w)*2wwJH7c&!VfnYz75}HtG|V(GzRTWc|>q`lqRSPek>_ z%%ui5*n;O?CaPX)NJaGA-t1e8TOHYZANMay!5{c)fIxt;Kw`_TP@UgO@+-^TkMpMl z_SiIMjqn58(@dd94o=sJGAfzfMCXk*u!6d~E<<|mn)Oh zu;8vzM99!;3=_`ozst@`N;&*bk~ary0!$$wZ*didmuP8=em!(A9T}ou~Jk z8c5rm_RF#<_FE!0R9QOp@X`|=C4+Om3bW5^x6IW zPXjehiFi7L2x3@`is-9w@w~QItw&{oxI43JT)Q;b$>R7R5NtkmAw)Mx2yaf@tMU@} zX(ny5_{efh5a;^?Z8p#ssP%#mJ6Bu}S#WBt(;zxvPXR}z!0u+VigYXI*0>2F25G54 z5vyVb9SVn%nJU>?d$Sf;R?G^`>OJSqoy$012_}sL0OR7DW^vutlu4c4`!_X@=jS0Qe{}Rxj z!r#!i^g;&{`N>r&%;kglVe2v#0;_GPmpkpQJQ+uCzEcafH753L6L_ z!Q309N`#8@A90|wsKN!FgOF+ib|f>~vTAC0&ETu!e|3O@L7=BkfXV^#*R84DmR$Q) zO3x##at_AI#@KQGUry%l<-jd^Pj{4;@Z!Hp6!t;m~OHLDOPDH9LZ$8QtnRDt*5#0>+Xe8~p5a8h@-Og5?uP?Yd zJODi<1B`f(&iFiF%%uH&biL0U*)Rv(U3Oh+*n!3MrF*gW_qB$)_QO-}g*W_9K=xYq z!8wJ$Uo8nKu>@%>VEK@U-rU>vtT8isVd^;-V z{W1uJQ$&&iv+P*zB6x1y4Bh{HtQ&PmGeRCM@o7qdK+q*d43Mv^Zuej>e7$+8(9*bK z_j=;!mBU6egob8Ga>%7Uh0Dn=v27uyMQopA&V6?n0k)|YNqcBn_eWP2$$xmKi?yz- zXkYKLc$|!5S4)6b*#}1YUChk&x_z2MrPle~NK54EJCG*&_S9S={Q1EX^J)a?b$T^Q<5;wyBUy>ZB}?W>WkhWGy(0BzD*u84sbPMQ;R#p6VO50n}< zy0Hs5AS;(ME!}7J`(q-pF0a{FHz6d5A?sI`oo^}J#bir+T`)TcDvSn>(&g1XM6{&H<;o+stEIzeO z9Rochj?JK7z{=llS!l~$L-VeyfsG!4m|Om$*ZSooWLk!4{%9abbkSohFs+BE%t7UQ ziO?ln=$JAU?31M_mVnw!>_Ln5FkI~nZ_hPF2@U-XJI4kgz5a6mZp-05d*#62l{aI9 zPRgvF$3*2DwF3%uO8V8;e;PSnQr+!rHfBQ&;}+DF2~}?VCsLn?=i9K&3Axm_!1EE_ z&c|>L6(ZkCkK^Bmc7EMw?Yp9*iO(tKjP)|Kr67m0)VDBvb&ES?B8__6A4FKOp|5-&gBTcx8Pa8A$>@5w=GR}I7;k5c189%hs0j}T> z5}7!V&wqQKmgwX!xpX~OR(j3nGLb@bXOG&RA<+u!680ZLwClsoZM{nCejZ37dg!U_ zuSs;pfEgRf@`sq=iO(nj@+Scef}V;xqFhhgxRo{fq>TQjD5otDy?%Ez*0 z960qss$;u_eQo(4$42$D3LHiEb@aitnWdryQn|Pw??+I8SE$*D@f9B__;$KehgFuS zY+$YDd~JGSsu3sjmIwxe_*hlOb=PO8k;lcDv|31=aI%c3(a_+F`6@2g%*+N+2-gio zAZO)Syi!Z!<=Dt$%j}dgDNO-@CU9R&K9^S&Rqe)cY7pav3>9rpUeYtT{5e(Z{T_5_ z>oQ@0h7b;3L3$I_!#5uDn|%YiKq3tYU) z^-Z}lA-52A%ZRuF!{EEb%qPrF%@j4JdjM(?! 
z&p6Vreg4N>3g(K%Kyu01fnHpkk}G7;TGHfe#JW-H#C+ZF79yJfqgyRldIE;w_JBs(g|F~1f+Pi)(P}nhX1F4|vdCmyf}8_5;Fa zZXC1sEUJF3toI=O%g%-8+~RDaYLIGL?C7=DkG-_`{xwR#9N#(4_uK|DrY67c4lJ~Y z50em(QBY8by|vMjL9yiJgqP$j8WVy*08VVnCeJ@xud-H-ATk<@dhcK3~5S)f$R# z^j#pZs}4UsXElhwT4G^*LX$OWH+)OOwmuP9XhE|~5zNnS*g7>K>NlNsdy#e0cH^k* z>v*?16}9hyk0<0Z1&)iW^D-%lfq7n+)pC^0<%uKy@Y)=Y=(0^HoG<1gjb~Z}v;4Q4 zq{#BJ^`3#mz616^DoZXpyxstsw4^4-u|)I2wvkX}ALoT5+Nct3D(g?kR_oag?7oo7 z)tWhlxph+gymEi9ueG;Z-rd{|6Gw)ZHsir5f@kF^iI>+W`XwLy$|`zC$o1G}inRw1 zx3pe=Ck>V|to!SE`^OUWWzR&O6!gW$De?6l9V^rW86$7U{N=TBlXxqWbH z#V_MU%m+F_QInv7mt=!7i7$e<*@eTbD(OxP;8_1>kq%g zvWVKRU$apnNb)hkF}GUNaIejy4A zWAiodiYhRgB_yLFN)_}7Q{M51g1$%q(JuhD>}VS-rpyp=`#t^rk4a^Sd^V#mbbW>L>n{_@JFl@%GVU&TUV9W+GVil7aAJYqTN z05&@p(}B)upljbG1CeeFH4iogEwX{$aa4R;0DTd6A~$Gvo#_@uBCE5;jN%62DQ0?) z0Eg~Xu|cycB9Jt=lwr!9#^ZW2%?nl6#y_`>)_h#wX|!5Fw6@W9el4(Pc!!EWRHd|E zp6IIH@?LYWIHwQ!rSemtmr`8)rYs)<`6R40!y_r(imUE^$+z{r z2+MeV{k4$1D6mJA{B8K~0d_K~!-QDL;g8JIBmz7%5?)OJ^f$9xNPFE932h3ysU~sEZS$3I{G8H;cn#8 zZGTTEF1HQQAOwata-EBb7|!3FeO=qC9=JVQ0H@{w)MHYudH`^^WXHd?z zx^*IKrb>+3FBep!_Ry!~EV7WHmDI`oA}f(@Cpybr0vE<8s&qk7YA2=jn&70CBmcI} z`W_c_C}FLDyWxXIyJjtpAN=CfP8XX_8bl=vCN_HnqD>aRE2jHUM^9+*kET?Pb~Wjo z=gw{|y0SHRG-C1bH2|M8Jn9xl4neL{t?O3~x|-8g1#6tP9wjq(65#ik4P8aC4brPR z?T-Qhg;6&xO@r^86t(BH@2KoQ)ZZA_6RAQj?*02VY-+EnQFvy=i5C!*hcSxcUeL9@ z!YJ_!m-!9%OBVs#_T*&Zq_Y!=udo6$M2Sh|VJFd!vynhv1#q@hNnq4}U$$0I3|&24 z4D8dBTR(o=+h>x7`;&{v8L04z{^c)pb9 zcGPLd_?U)<-E;Xtf5+6QZ417h0CT6?n7->-Bzf0}*H}xOWfS=3d*4w>*dAf#Lwd9S%gU0dM zXE~&UKxkFDJ+AY;X>!I??elK-~(Si(u68n5j=-BhX^ z_})9DxB5$Iu3Q~E#Z@s_2A`H}d81DFQmyUA;C*g*GmFscf1%-9+x3&v%rl8>LTjtS zNAhqVmE-O#HkbS|4#E)i#9eSjRk^V@kh0$8;@WGStz5d-uIvwCY~&lDo5WE>`pB!( z{=n+$5*Zqrsk>Um2?%k_;i|*Aulw?*rO4!<;u5U45;{1F&Gw3djRXdqSl+G&c{xxp z3A(usBdSXvkStTAKGkdplgk?TIgY3ZoR(&J6n=YQFcPT2fs}#_`Ejr#m}qqz^j%&a{u%Pzy=cUNzi&2b z+&PsE3uV8%B_i=(B$=a{vQ;Bksvs#wJ70{4bW)Y{v(Cq$J_|g_`HTona;BgesURu$ z^DfKT94$PDEAYiSn#>g$@3~$23B)Gt074tqrS#$x`5&A%W|h|uwY7Qh9u}f+ujcQJ 
z%?do&+`7JIWw}_^4+*#(y!3V@!>VhIbGTjVpW+9GVX-AYBJF#pdaw*2kooEGNu^Cu zIC37DE{y7UDRWV2rE2fLUrPi}@l$n{W!nil?U3dfui5DbyH}YPi?gk`N5Xp3p*tve z1|jIpFWl!#7Hbs(syHxt`Lk?2bF?EW*gXc;u!A1Z6%(Kp8cGp$TCyoe^Cr;Kbey$2 zP`MiWJkDrYjo=&8U`sS1!J4|`4Wp&OZxh$rWT0tXw4D?nauf7}&6tg&bZL4Pcd0~R zFjgFP{TOqTkZ>!k4C9m`yvF1D+x^p>58`J#4u=b8O+SQO&vwtv4J-XA}RJ&_ii_dQ25`g~IYUjr!X5CdKiK!5cbH)%y zg9S|AVNa&~bcf$rx7yP6Qd>e(6WI1-Wm@9IHV1*9KLfrbPlXRl{w3_EhBCxRq1m(@e15C={RFPJJ`I zse>g;|1jjN=sCQXkqRPveIo6AH7Pf7DePNT)W3fCHCQVmOn+ieGjlc8kj@5$7Mdid zm~e0W`)@rewF$~&XkFr5;1n4VdFYj&=UE(*TSsYDS@fG47cV3-t+7&QSB0iYL9qzt zt7W=%3FI!y#qJ*_PKI}yZ-h~4)^|L?wXgAYU4BH;;>)=n4%xKt-F9vj+1$afUfL~? z&Z;xJ@!juS6#(W84Z6Cre;mySU~jRW@5WDbSUcP2E6z5eVCcqlW1P-fS;bZ)*l2G4A`dmW{{Ten3c#A`s`=!E{xu3BIl;rFvpBH-S)F@( z+hvOz57mOS?m1X+zc4?qN4lp1@(|D1+yl4Pjdqr$KNaewzj8fGr{Nl|7qPNuPAqyo2!I#pqO+Uqbhq{)s$|XGU~St-<)^OT z&26FdK|2pGl0tNvKR5t<*eBuk^|RvSpO<-@*An;_lNAYhk-fB;Z_uuDHLrqqArebZ znzQS%MlN3}px{p91*_{ao;50pV(*4=0npYk$VP5cI#?*mI{7XeykxxgCFUjd)V)v+PPJeF@1J&Q&_#1!E4fAWhmw?$$>~7-_(HD zqp}}fKS|Bzz|1#xpjC5q1vUW4C%7XS|NO8l@iK`N<{tqRRMX5Tc0yG=*;Zq*%M)g~ zz+<&_&Hoh^ZUw&EUt181jpUeaJ&Zuw`mP9&A$97lx8{8qIEK`wJ_wkz8`p296b>es zZ^wJFGvt}KNR1>?<%FxqlZbs<@BR&X@fR2*Pyuk*BC+NGdIv$bnGLq#e|;>wbeA63 zR$i4@l5u-ino(#&5oa~Kt&G?WGa7aoUv0hoGcGW9wh~L_<);xv6=>PG!p}lcg@+jk z)#1FvF_^r%!J({HGH&)?oS~l!dpsBUs@lEe^THgj|4h&;60y*9`TAVl>4(8`Wwa=& zlp1(y>*>xq$z2+?*WvbM^XpXu+ydAA+2HdsdYVaLpOLf`hy9u`$S1`v&eK9{baMT_ zT+v|K{fHih))8+}%k)73^) zc68iP3RR>2WFco*8|l1lrHs?uKAp|6{#(a$&(k4%buAQiqR15*Zn!U><>?=LpUA-9 z=Tk&=$X?xj!?vw@gnGGE@t9T)dn7?ze0b0NgJ0Mf36c$sTMqgclO3H@H{LTlK0L8( z7LoMaDL{KOMpW*Fdbfzt-g~6mFqGD|Je<+ezSpNq()XID$0@BV#e4t85`ECOH?^VT zb9b(`JDe-h7O>k}Sk5`EXX&hrjekT*41QLGofJ z@y~XT)4VkvbvAORG%FoFErsxQlisw`BMb+yh)5@C2cBh}#gLafDst-1?Za`Y>0k>?_8V?K)Db+z7|KMBJ(M1iz&%*|rarSk^T7w^v z>JIU;3fW9*6r(Ttz>E5cluo8{*c{xNWf;V!2L8RRE zMH1MhMGf3ASD)d3Gip9I09h42xfa04)al)@yYefGgYl^vH`O$60rT5$kM3zZEY|7_ z<79IN{M|XD+37J&yDJ#DajgqH2ZJ@cTvoeaSgN^zzS#fI-0QyJ0qdSO2L{#u#}1Ci 
zRdzpw?ahsbBbV&vl7`bb8K}AF?Is@K4$j6N=(7q~80|J&uWFqx=uInry+UxWskX8Y?TG{LrW9{eSKkt%`B8 z>>Fh9BGD(~&1li9u8~#*&rO4J7VSO`3uQ`tqM)eB{CJlGx${Y$S8k&NYleuIt>mZC z%|4v+nJG7;{UjFmf@*=q>~*)z-XhG-vdd6L@rtg+u2&-CePkP9?rleZ#xa=zR*9QR zuW@kQxUR+yzJ<<^5u0r^9kDvULwzH0GThG@HiBM+Wy`i-EPP|A!v7$?3Sgb2zfzVm_I4T5(cKh-SUrN22vp*WKD}(zxM9WLgybe0CAIYoYzzbhgdb zu&jB5(Mg2aYbkHu#WHH&>#aAk$jztu?b~z2t4C8UFO?0>rRTJ7mN8W6^m({T#Khzo z9a>HKu2B(lyL$&wOkH_wj-2N^U$d(iL@^|1q+X$L@bzps}z%-F|m?&7+}?uimZVeKKKC zYaMewI^LITHRhkNKij&RxP%!ZFHRvd2=G}JDxRK-)T*wZR;1!a)noA)$`>1NlamR$ z^CNsb!y;f1*}b5yF>cxX@V>Mx+3DO^+}$XEV;X3+BUMFdZ5{9%10l^X{$V{Rx7YT^yGlu?jUda6@EC zitl->U*vk+Y8xQN`sSY{Oh98Z{$F9ebtdaI8!n4F+#ZwR@&pWnzV=D2VdYqw+LO|H zr3S}*^0$8L^Ub_2d#Utn=b5}4nUz>|9%>$_0~CJD=}x)!&`?dv!1_4Vker<;G&2YW zI3B{+H@JH3SAce&g?MYb^|h&$qQ~{?D)Kp^95KJ>{CFX-F*of*)>LYor78lqEPk@k zI62Es6&#%mDDo)abZ%8y5&dmffQPX?ewfM-%;wVJUdy^V-F&hq zKTP2~%IDigEixu?oR;32G}Olar_Q%8aDJC6bpy?meT))ver?9?=e9E1$dm5}9j$qS zazQjVOvJZ*TtKLrG4>=YcUDo|z$g3{C+~de=P28KM#rx8+G`>WQH%Uymt(vcIxE7J z=3BSP_i7fccstc`$)9&Sw7i&W`nqJHWjPbWLM4@VD|5qR){92`eCyD~#kbuHNDV#g zi)(a6GuJ2M1DK|=j4u(sd$8e`C5~4(ih1UdDkMBxBk1A9)*e2?eJBXJ|C^tfvk_+u zTZy=vZ*BRN%G*Ln=e_1t6Gcambvx$CVhX74RL<+~I!wSFcNXgrXApr}JH zwZ{FJgmc;>eD@sV<9yJxyu6Ao_64ho;n`7EyO zVK0w9q#}BM4d%$}CQh*2`9Zdb)L0*>pBcuoayc7_B4#>1IC+`sQy|^1l`(sgX+3Y# zdG|ScRhN<$*v=+AnUbjiunTxvv1vu=iJoiQ2|3!AA{#ApoM01rOycJ4i#3~mL=V#W z#=DQ9F%kBZLs=*;G5v-Lih|j-udp|{PiHQqEY7+NIhyg-Y<Hqh;iN2sV%F6h!v& z+KjgUym0qdg?0l`bdA|e;k*raFa3k_4a7_8p(Ani;ZERyD&w4m?Y!R6(b5i!?on>X zLz*nA^E6cT)h}&k%SbkN!3(6F6$IkybR9Zozn?zX>dq$MHMi`Tq_v@yad-3mG{@?; z0Rpif;~%J=Z(i-RExj5~=_Pe8?{SU>ilcxjW|}x2jKN{FV_IBv)9`D!ioUXQxksi^J!j z^GsvMMZ4#U$88NMxfPE4yYC<`vl*^7$VKNbY`u5KnFlMlIwJSj7oOYCzwtY3ds01_ z#zf!BUH)!+Z`PJuH@y{wcu{ZijSiCYf3f%0L2-QTzaSAHBq19`iTXO4ZO$aoY1*4uu)Zc7 zU+$+;w-D<&0QwG3b&IkQ*^riVD~O01V;K=nGeDYz&_mvV9{xryLl$^!jJ1v-thfY^kgVPp6VR`rSCLf+h@N z0>~v%*gM8BXrtH9>aPv&It|Hpoo>CF`h7iXc5$%Azjr&L334a*rVDx>jv5xM*q_~3 z;yjBJ7td8vluc6nDKHcShO>K(Fa_&X5Lmy+Cctw201W!IuE+a3p@R$zwrhRxEpk|z 
zKrR8x(xe?Ys#Y;wnjOM+#u&Yq`0o*@$JU#WcqXm0fu?Sy@biL}I%@yD;%#HhQz_>S zlI}I4NNFGNOZ}kWl=`hBiElkhFcxI-MqeT2zt-Nx6$N12A7iDI>&pzbxwvfp85qrH z5!IIiQSb3tlRm*)i*$jEBFXyfHz9E|v?RNR583^5E%!erFH$kDZFR9SgtJe^;%q^n zh^ zN!VBlT@wcQr6HU0YwLuCv<#IG&M9~VcvhV6jt3juPEG`(VYAr|t-j&NYpQwMvR6fY zVNAAiw2oE*$A;)Hs>*n`-Q8FYk&%mB7J+qo9QWOE*G&Zy*z6|hkgv%90M_H*6sSVj z;_B^S&989?9W|rUu6vzueo*@M4k3t)7dP^ocpOS(4XP`@oW=LEXo^)UKou<|E z#MQ1cB^>SSc<|8z%kTbhJU$0*$n{9QWE{y&R#mPy#OF(dsUlV!Su+J)n#A=!fBxX? z3I7tEB(}A!^EjiA;}uOhqnG#D!;8&qF}jQfM@2%SNbqv+RDSo0d~}=<4;6w@K zf4EuxST?lx*^%g1>G!McMI1~{QYIwsO}g^b^di@%`N}bQgq&S8JDPb3tF19E@Ud@6 zI{4q5-`TsM8*Qu{=oEl7biHjP{`TifzqgRd3zeDjsR(r}R!>4*ZQ1bMI_lZke7tD= zs4qJzcI2&E;*d}qY`Uq%t2EQ1sr*u@)wixx)@-?~(!zCvTV(F;@`jDHK9FC`F4$_W zByzVEwknLdb+Dqx?sf{^3Gebg7y^(`dC0El@ikOtT)SWH?;c;say`%EY*~ZDDKb3-en#L9uSt9?3U`# z`9L8zx|7^VUrx3-xc!ZKAo5alO5=Uk_u+l*QXlE?v;q~jV^ewQ1$cO7EC0^7o(eP{ z4~eX{>XwX%)cd+msnZ%iF*(#TF&&U*5G&LDsk5JdUN$l}si^9r?<~M$!08>o<^?kk zDHq?EhJe8h4r)eZEr-*a&nS+pImoPh#OreGz4I&bmIJIyb=%HgMMq<`c^0m{fmn1& z%`C&F@smIwaTSp|XL$uF3V}u<3<2;z&7HCptu+<_M{KpCIb@KLg4T=mW*tdB=1VIT zd<`v4{$+ccxZs?^oak`y&GPr?EFHbbLVbg5LK*#Tn%Qc#iln!`-e$MmQzhx&99R>C zY;^Uc3wmg@&u7H+GUk6W!IxXtw4{0}pk7c;33r8ZC#VYL{%pd9Kz@MX!hYAphN?ZR zxwBP^ukL5e?c0hE#ms8w88nLBLIQ%PufZ}EbG5LB(T=VF@b?ojN&4^7@6~iwWz7^p zKJKSvaS1ZwX`4?OzJ1PoIaN*THmKrSRQ^&Q;mUci^(b_cY;oQ+v`EFRw z1;VX_(|A4gzF(HO_V;=SH;!jG(FCzD#zUVo%KXhE(vjZ`r!jxA0&Z{X%rHJVa0oVaM0D2o9CVM~*7I zJZs&?5m;JT#7#EZYUiH9AM|YXGQ({fCCKjE*hH$Bq|S~v4X*<}j%gOEM~Ca>F|LD~ zbUde~^7`csqg0}9ZzlQfc#l77l&xka99s-!at2C6yNJj^KHdFWE$Q%ee-CT`$JqJtthswf z=1L#uC4cOl@$>?nPfO7M)-2rYq{Y}fY=&Z>_(#(V$RHB>W0pY&702T9{-mLnz=29}hV+j-gkE!km9Z5|>B;ZbcS z)cBzoGr^%O6I$>U42JXC6>d%SmM9y{;#f0PSTYSLW{rZNi58i1t(v+;)0!(X%ax~2 z+kcFoDUSY}`JE8Pa8=I#i&)UZNjh~*9a_l6s|;yDHZ#8i8EK?eo>i@akoqQ??1~v83%{ZLYh8UKi+gl z7?aeGo^09j=$2g_#YS$f0UqCxS5!dSSFf(}Nm&51zT?*eaKyiMXaH}&@kv#6BLCZQr7zdgM>HX94E^O&S0!_KH1xUKyJn^l z1M2X>VE=V!Zc6T&J|ooVYbTdv%NwPP^2CPAf4sQ>x~3RXct+(zuo@~c5O^4UAU`_= 
zqQ)R+ZT#bDX)(#xN+Jl%&a9xgsV$Ot2R+*1VB24)Sr48We*}i9oLih+iyp(*ac@kX2X0w;KDNdp398xc z^0QIurRnv@Xi2mquSxbq%)D*#{A9nZ{{6FEggI(q=y;${m{@O-UJ({H1UnQ z{x2kH9c;WH`u$xRB%zF&Z&SQ)zE?0L+;Ddh`}d^nhFu)rZH$nhW+8!_r==1h;V75AT<4J`?_@*@S7Cw3qX~o|dRB{B=6wp9=y`X5um$g7n?)-k4ic69sm! z2^n{v;Xlrzin01DcnJMp6_$IjNEogo;uh^)_$|n|*oaZB&M`iP5yvvbtKkB6I8jnB zKSojUoFMwzn~5oVELP{mrA)}K967S@D7wG zE59PZchmaxu9|jyjfO;8-?#oW>E$7&j2huefH!S1fg+}y(q?SJ-hYfKS1y9y(Y9|Z<*|IqQn{-J&}Qno&^~@Pzr|P zKb+1RF8a3#@a9xI5-$AF>S=DTwo+R-`-`qY_?PM>R|rb%&jDMa6bGwt@C8gh>uC7w z_1;u-van0}@#1N;+1YofC?KBJcQ+H68~_obnZnYU7>`tZ<&5AQovKx}R)5`r0_}-Qz}? zU7U@6dSoeRURxitw0oP?SWy#S+A96vx%rdzK9&|GkWTyGbWdiA7yp3z{|jST#CN9- zpYCT$T$Z2wNIOqO>S;60j(nkTPs5wFtNy2ldzS6N$gn^|jSE7G%eMFUV#!MPYda?* z)7u6*NQNqQeaRb?o9X}tl7QSg4q9iOI$&lBq?+qYlM;mG91(DJB?>rDdh2l1H`cKd2O zYGIxvM}L76G=9YqOxCqrz8i^0)xOyKaHWCiuz+!f;0#p7Jd*rIu8dE`JvAcihEbc~QihBhN zE;3QdEYq%>rB62&5*G(;9az>|V^<({G(RHO{hsRFE-@cVd*`W6A@Bdb zwsT|F zZ5A?E_M@@~`fy$DM48gWw7MCnr z+&8Sne^SX`{4w->1G7iPHu7jU4f9+E;dkd;w$;J`uBzNW_$faBJLSAFbjg}OvCsyHfmt8Z} zEP#wrcv7uK==1diMw;l~KSt%}QBH=mTy={h5tC9}gUl)O)}Grl_|sJ_=nEq?CG(jB zTUzf6n>7yJGu3M%`aw{7CBk9mYTf7;6jm=XdQca#37ymi#07;%GAXhjd$>j9u>kNjK!-(l z*LGn)B4A~!8W}3nTDW=(lO~#8_0Kfj@5Hpo2o?{Yv`ptp+kAx1gl0B$JlbWlP+}RD z)Lz8`SOZnDjS5NyjO5L}Y`j~FdsE8WdOFqb{caq12avqH%0m7(B%TIW(qHfA5W&;aa9C#45JK2KZ}orRQ+5|)zEt| ziYEtW&1P^_yfqOKgLvfo%ul0gS?~A;g}p7cFaRnlpAa^vz60vJ366q_^@SpuW_EgB zm(ypSBSdI11X(}n40noS7XzgouzNn~G&&DkjC#Q)zZgokO9xmed&~Nn)IHF99L`MX zk^zc%n6T9M{SL91y!qQODb@@H-rGXHss4JODM3vR<1M;CbDhy8T-Mu?EYZp4@+Ovx` z{y7Pla*!z-Uly5l>cu_pmN!Vfam$Dq-r*DbCI0>{^w5dicdFrt&akFt8z*+0Tr0IA zmBH|!hjV(w!Y)~5K!8mg%Qfe~`?$`_Q% zRpQylc!&z-K=dTc{gmMT$3cS|38&=OVO7KW_Bs(p#oOkdSR*d`rUEU#C9ADH@=&Y6 z3}#x<2s$p9*d=q8}A zgpp`9+({~?b#KyJWXtui18$>9Yg^pUJQb#utmkUD`Z;2~gNLl5UMi6K2f4wz>lvNe(AsVsdYFU|4GnfaN%`f{zM6qs9CF9Qf%#&qU z^Npt}+9g*HnSAwrsK`SDb|QpZ^M-6nUqI&U>cvZ*-PdW5jF@pyS4YIVFr%io1>0+i zS{^MaxtWDw)|}OtoR{AGR(5sUb~gt}X_c_eYY!g{R-56D%CWou_{CVU6Q9{P;&X0y 
z$|;sf17Rj(ejE=zi*N@eR*VtM>nNOVoq)~ZjKpvkDCQ8x8}@p0+)9DpqJAsU3kvp= zn-bBn*GarH$yp^VF4#(^+?$p2tfIW8-JsGKtP$Xg=D@FGQhhs>igjFo)6$nYW~9KX z!+uv<*e+d-IWESHp@+J*;VkSI?QIAcOoyj%#<2;A`#d%pcjj2XQcu++h)9KziUxj( zjj4zb@yk6_w>^Y4bi^&n6ZrdE2EJ398hH~MC54YBub1hHF3mY!FkCA}{W&?Ozw1FUU7vktneuhrr) z3k>cj)a#OqHZq&iz~wvtl%c*boD2q=TlPEn?U+{I*qNk_QV_w;nEG@E>;+N+nf@CW zz{kc+V9{mI#~?1q4i1Y~V~nMT4_tsiO}eIHB5}XVWh6&hA)5H}`r%;rHuquo5d1A> z`-RY6lC@qcV6j6Z5d^+ev(vX*)hGUzDAHjT9x;Wxs>Qb$Wuz`|X5QDPe%h_^X&!sw zJ=Nv!&@G7>i!0B_`t_43H;T0BztuZ)acwe_C;YwTMMa2%`~^l#v*E*)nu2Dou#~{j zO;>#qi*AMb>L1{tMi0!uQI~mP-oyWFL-Wr?b}zx)+42=P zTOI&l|Kd|-^cR@+;(Bz&a}LHyx!}n(`yqU0h^d&qI5i$f(J7{Q+I$CI&X{^`>ENii zu0(ps-rSx01upiS6UvgFGOzt3FJ(D=d2mb1(!~Gt+<{0~M1#I1N%jmIs=d!b?Z#zwQH$H;7f6NczP0ImC%w?`;Tac~*E;^_ zKLOL8JIsfzTu~B__VaZv=SgY@=wNNLF^Aj2(uaBD%=)H>Bwe$ZkvduFH{{oQDIPoaLV^G> zADjBnZa>jjQzkoS6HB&5o)<%Dj?1kGxe|d2M+(DL(PR~~AAuUAu1zjelgs*P_6H_| z<*X)W{VnSf6_oFcF1T=rxRh68_;B2tlo!dcwt!Ag z?fvxMkII8c1(Kg0GOK42i7hfQJlB+Qx;MsaQ`QN4eE!|g%E`8~{=4FHTIuOPC zXE7Q4sEb2+B>2lSeB1b@rvzYbAjzS3|w}Ml(jHb3IehX; zb`bPlz%Dr>I<@+rT?Z*N)3u@Fq<=7!=I`VMa+9vUED>^1jLc~eaQ4#8$?wiWXJ%7& z-K|PvD_=2K;B|FgYveyUAW6t8618{6#@!5(={u$?9z;k%iZ(B+sSG`XN|6jT9|IN7 zHolbNNH;mI_QEzld0u1{-oTHp@RV%;%v1f zC_vitve>7)d{O#h=R8bT8z*iKxb!JdyPRPHcORydS)>VO{&YzVbaCC*deC&27=&9& zgDm_rq6sH#BYj@A2=X|NygLP)1e7fWB6Sj``Iw-6Igf1wb#~>|KO!8g2QhQiW_vPb zVBg)Eb(OpJ7Z*zf>tRmeK^K>)VwxF_yKOukV63b&M$ZRLqq4G51&xZfzNu4U0&YyH zddz9T1c9D;wLJ~3W~p)(KJ01EFesxpkF1&!v#)L45wEMPWNt}9R_VZ2mHYJNHprnB z)x(^}XlrW!ZD_Xsh7~VwN0s?Zlq#=*yK2)l|NBkOryyx7tj~Dn54FN}zao{qcJn%lV?(T6&xoXj08w z*5JPkf}rd8@GdR&>_|&3s0Zi?xT;C3MZBd_ z6`;RHRwNwvt*bTFD%WLB%XO)g`!I>@_!c7-LF=7mTu@c%@{&I_xC+F^^*6Hk z;N3s@&|y9EZRh6K_09r@0omcI%a6QFeaV@1wk1b)0xIdhDD}`Er!eZ;cR+sUDyqYE zl2W_=2Umj%>Qq%xWR^=#P4{`+!7;IYIBTDx&wB+=$uu4{IN zsbJR9+ReS-WYpOTB^6Au_V}jPId`X;^l#rHUCvg7by9ZaA0b?Pklkap_q!ag%ake} zMDh%-K{yyZh~+tMAECBu73ENIzhYQ*5 z-gR+IvvD0iX$PVyv)|G9-f?M!#U?pSy?5`bs@=qV^~Y!?Yt^Eq5FK1csT{;h}78LT&wrZ}7VzEAzG&;D*AbxSip996dsw5CCPw@uD6=ji#&?*}OX 
zwv!ay`h8}O;iQ11&wE_{4-yC+@3Y=F!SIBgD6saE+U7Kv6mj-|t}%Sw&;L_>WA8@% zsWOk<&3<8fP%ABvG<~smYscHDY&m{n|MQKQa`V|;Z+av0ExlC<5s z?n@8E2s$R5@+D<@^`k5tUgN{0BNZl3x?e_E@C@gR)>EF9b_H3M^KXTo` zf?h;4H?yAw%W_j|`|#l<_t&?O@675k6Exq1d~$3Y`h;ZZP_HI1@*zV$RXnv(jxp9p5r{!KjQ3xq%6Adu(K<)S%N&e%h2VC4yzMu8 z$TYewE8gs}YuBefI@+6CR#~9?uu+?2MALoyh{v@%8G8Ad_IPZfFQ_WidUkhJRvi}3 zEj!*e9?s}JB%XroFo%wlayljlRc7NP=m|5*v3tjxE4_3#Ap*|mo7q{cBX=1b0`C`~ ztmgOM|AvSaCafejDad^WsQmVOxL}8_{LAJO6S=F$maD$ErU-3(v6*cImfFUy2nFw$ zGVPm)rMbFhNGv9bm){4xj)XxDKZnF-P6mbya2#7?dID>tVn0h;s_&Xczy_SnUghLP z*h`K3#LNOZV3U{VauL>($tQ9^+~Yv^kVE?@0y||${#}5JsFlG)YNS^DYTf;91||aw zt>uLd+z9E%4~sa}Ht_R#2Z2N_ZR`XkbCqK=0IrqFb}kOir%A!MHK(r0G)3XyxZV-% z5G8eU>?J*gJAoLZr|t#9W#lK1()eVLav&HTqKUzC z*5xP#pb}2oS^SxpG+sHdh83tzz%(gJnjMuv#^k8NmvFhBD6ktma~O_+9e2AJ5_ilb zDW)(8)$yYdtulejzj^1*XwChjt$}xk$ol}@B~%76KN0(LAHMSL#K?=*A-(Uk`$c!7 zGr*7b;i0e5T9k=oA+QM%gw#7+4e!(cn`euz*7UGC&lEnc%9>R%s(7=ZYvy;BDz78U zHr>N=M{nTR#33}pWHZ9=tf^WR4-SZhoLj_Od8i%@Z!DR*lE$)lZN9;&$5>JdP94WnuoM+RwV?hgx`pHXxNmQZnG2^Aw`ElKQ z5|k5uq@zstUTfz_Y2Z0cXCE$>#`j@EY{qrm(`7o=C|HvFiFT%y%%9_9Lbhbw{pzj# z7yuh;$s}uW_+H8dx4pR2YH09HOeO%*v4(5 zqW)Q8B8VI4Ag&1g#-pR>eD&Og&oH6+YsJc2D9 z_d}T!nEfl<^#usjw%(r49N*sRv~qHfzK6>zAu?Yo$-p0-E#f1<9lU5Xv0o8TI@Tza3U%J4(3na|%I z&WcZvf>#_4>b4ZqZQ@~oUq|S&#(f&*M`H&m7|T|QGuGKJ=~Kr=6zOO;Q)KVRfFQRmh%kAYm zwQ`~o>0<{RMX~XH!Qgc22@4@`Iw_7<(HKu@Rd1Md|>?~huKvp#sZ+1R8l8K;hrs{^KU5S(22*JAg%4(!||Pub~*f%vbBY+*hB(_Y&) z$#2Mb_0y-WRZVDVY=B4{Z5L0=#ri?B1SU^STgYeSTKnrHnY`N;CLQ~Hn6A{b0H>+* z`-*G2k~dH#g>y0rR{J`0pMxoZex(YU-0dC1;b9R$t(gOyvx&&%usi*EV#r0Wtp(!Q zv0>Zf(tiE&q9Kpg2X{|T&$~^@g$D;J4n}hf3{-rmk!_%H3S5*fubYUq0>j}ZW-W8r zHk;!(q%qz-Iy2eZNw(HDH{Hdb-5Rh0qMFrfo!sUZ=$`<*5SC zh;Y~Eo9NrPGyNNn9VaDbRQG=y}&g&l6>p(d8%cc*GF)2b#J+7ZASc}>T)v- z$pYn5fO9ywF@Cy1ln!=8J;4|`2;qfh&MJL}krQ+Lh_CChto@mwO4D{kc%c+2^unJO zBnN#GG!C4orBum*uwCIB4lyQvQXo5Kr3R{6)@s{brnb@?ZL++{HDc3FECd3}r#7us zAMbnT-zUlWXEB8JYd3#UuK!8poa1Urcwmj2mY%Eno0IQ4ws$(Dcm(P&qi(vAlWVk5 
zM{4@5H`;@`DLz0-h}}jhP+AnQ?^;oFZQ++OA1Y?%R(N8vw3E!nqd7>3b|*!l0U>^y zQIyQ)o?=sBZt%4EtGI*JYuOK8`3xl%nH0tAn-vd!@yDd%&#Bi5!p2ft=aF0Fjunv} zR2%5A6vx!7m8?=yAcd7o_wW#jxY1@@fY=9qdOpfWX%+FWQ~<=w8+{=hw%wQLkbr_D zf_qxfRb%{#M0V9WY8^$Ffm>|ansWA$lF2Z>H?O3R`%LD~7q0u=Y4A zsc__KQuiF$x>}MT$2(Z-&$VxJ7;MZ@PrM?^aVeMn9}jZ>2v{!X7}*HO&RF{87Wge| zZj1GX)q=@^MLEBynM7hKRn5EDL^KaTu=xD7>{i1v2 zDkBl_Q(ObPu+xC$AsU;#Y>?{whGgCZHiwSw6YgkMtH*8iZ;rvo)LIjXkCcG8ukx^O z!g*=N{Ou~6d1u3w;jcr%Dgn)IZYSSs#oWt7`rS?~cz z@PL|GKUP^RImh-dr+eGqK(*lCRHTfLB3fm}eQcan%9(Wn3WcsVBzUBME4ChIbqqM3wuaX(6xq~nneysBKQL&W(+2;xpcyfXKL_IN z>;td-0t|FO9lMTqPT`@<`U2C!&QVj{e9XLT{5zYdamfth?eg3AV?SA3)7xUN;+nn> z#iO^7(@@#lL$34IuTwLBxO^#RW?MhRXnzshxnn0=A1Nmz`*WpW+9%uXg5L-yBqp$Y zdovpxavWVgN6LJ7$Y)&QbAQX?nrXUW7@@r?9CjJ8PzBr@7~rd;m%(ehioAYaog5rEB=X1$XC`HuBtfDAucAix8{i;3Y{1u<0S2veaJ*paVMRKjs4dW+%d>ra%c#2HSW5e_nk!8V%B<-LEAwpZ(_!<>U z2(sR%_;J&Iez=^_>iC8hDpi4q=@63>CC2^0E%IJr{WdBSFL#NQfImQU2L z@77}Wz|i-a5!pGPTYI;n`3KQvpYMKNC&de=(&^gaJvLviwD{*`(=m%Z8DnP6FD}xO zEiczjqqwb{naigz>r6~*pokZg>ej-R@tK;NUZd>2viPsa1Pm5LMlU~m^mzXKg;tsF z^uO-%@+F#wI|hmq7P93PzQ&4ug%!JpoVKH&kmLS`0`mV!#V_E7hx;>>z==vzStJ%= zV9@=75c~amWI7&XrBVKFV*O7b1v!#2qlt`60CC#8t^IfxXc6K;>9_}|XHRl#8Lxr3lm5}13BxMEe7|hQe)6vU*VclD5 zJ4{eo^<>xWh@C4PS|!}R`Bm`-1!YYXNgYf$JZpd9EiwK?g!~w8$WswWq2g1M$xsa_ z$T7Uh6cw9`lh3v+sJO3JUqtvL@h5T#WTil+iE?@3jE5^1wPnrf+WM-67O&+aq*RLv zPmTAh+p?98@779?l${$m()Kly@}MPx*q@$?|ET9;VIZA?@f+15^^zeLyIb9)f+=vG zYFyvAth#3Q1h;7EP@C$~zJIu_QWv`-A7QZu%}?Q%C|{vs|Na`%3_LfV43o>FJ%eD+ z@5*(chsz;Hep-KQ&v6l(Z(=H|YhQlfCzS2mh!j z3z9e`dn`*EBZt3Bo;kyw(tWof-xbByZcGxR2H7t``dQ-%-x496&wCyFC&cbc(2=k2 z)!=BLcBkdl6>{Op)6<>@C8OSNeu73mS@3kdl&L!_Y=uP!8A8 znCAd$EpEoY)Qf5pZ{HgIxoZX2{j0twb4Z}hDyuJ0X^GFZ2{{HK|MPON_n%h&PtpeD z_J0-U?Ilm_<%aqatz5Sjndrr94LRa8Hz5Vgy?{tOAcs*{7$_{9T~dN9#Y+LnivFj# zf8?r>+y4uI<$o#q`TrO8UyCOHZ(&a)q<#Np)biF%gY2I+I2OXL_CGZ7|Frk-f|w*S z|BVaq?=J9<_yThKuQ^O@|Nb$IRQH99yJqq*1A`hvh9)DmOUuiUeGg?#b~5Sr38K9; zaZc{tBEKp3T>?#qr4Of?#ZX7$FYIL{B0OJCO6x0pjG?tV!|u 
zG^0c`x+1TrK%J58p@@hoOFzG3_qRDa)%W*@JY}k6)%P=(j#EBL(!RRPPW#)3j7l)4 z9O-bC^Zw`w@Ksx9MXP^#?n3_Xy9cHSv9X`_u17=@)w>|Q6)}gDPjCvq41UY%g6`{D(~@#4qEOFD#z7-&#+fixqQK@w;a7B|awm z$QPNFusxnodM^EV7c;uCvh_aWGBkO-+)%%_seOCOQ(sVJ*wk$mXw=)MF!W8Nti(3q zRd`N?Zs~gKu9bhNB1nCp{!z>}?bnR1@E9Y-SET|UhpIdcoa@tlXQWBczuxV7))uj> zsyW=utJX-Hxoj6cg>J=mP%&bSrpWZPt(gT|L=b_}fXCrM(RL9P~ zb7~rS$$eXbRXU=vq0`gO`?0V3ie`o04_A3J12<-8ncMv7vNbIw(oI|k?LF$kuiLaA zZqBM^i){^HMe&xIW~TKE-v+S(DzUnkRYI_53L;^{4?0%onHk&4%o}@5Sok*8-!D6h z;d0IMfkj%QWiuMO0!no}P}e8aIrWsjl18H{n;t8H)p^fJjU*|n{T9hx)UTRoB*0uekf8k=#ak-9Tw^`km z^rr4u$I5cK$eA6^+pFUiRA?OMb4oVMb9He>4lIt}L)2OkVc>9a_m#6*
Y~c!QKW~RmdQsHK02qx#!*NXM$;JzeOz|zniKCvTnC!h6D3C|9)v3kKsFr zX>t^K>O4|%*58=YB4=zkw5JAYh6cN3Yn(%a6t3L4dfM@klkU{k*=5R8a*T@#QUPRJ zGgu!T#XGAP62(7D^k0*0_Pcc_(D6!#op7 z#mva$&dhs<__C5q$s?*{b_)@gclx_ z;b^=nSi~zTsjs=?RXRM2k9fSMAL1`9wUT17Oi>&Ckdk>`Y6X(*HwguUb1IfpRVi!S zUc0*=J|)_Vsx{@6&ZL30L64FF4rB6sZ$wGKFri1rK@jj0NmCBMA%X;C! z&7Z2ZJ>|WmwpV{(x_Us)8CK1t=2xU02Ko=ab5HrL4{c55lzC=uz5}cM@$1?wPVSc; z$lY0Z!b^CBZiLmftTAegvTJ&Jae7+W&K*gnZG$Gew=eM@dgSFp&fh3?7-|eD27Op9 z42)1PR6?(vPH^~Y$X*ckFj`wr$ko9ha!}5h1bB;7_;U3fv^OX>xc<#LK}gDM$0WzA z!CQJT%figy9aKtinTRIa9N1ChT~KJ3TiKaUYvR=cL9?*gCvyxs#4C~qUlh523-#a< zH7HSb34p0NTy6+mX1O0frs@n^MQ2K?k5*>x!C;3A5AxL`SMIJy(|K@4fh_QG>Z_-P zIHYSgM)O$j`Ngva$Gd>BBI6T`_%%$rJp3WACz#v^-wQ&))p| z$i1uH`Y3a;r8BO5S>TL@l$B8Fpf*=!@#9CnW)Z&;G6K5kg@CwV`V3QQU>2;{atK6H zhUf+*>95n;x{@fm%WO{6%`5LL$#GSyat`wDs;8wcbIb_6e^h@4>RQj*tq$093GbBW zGu?(b7q+bgfwgxAePoPkIJBw?<89;YMvmIqJMuG#JEAhXi;XL7z(*sH8Qe%?3(KzKANHr`L-NA;+Y~so|-%{e293$m7nKG$j4?-$JEJ_PF8-%yrO@+xZ%I3u; z->OAZ*Q0YDTC;7oCtUr+(6RW&CY)ev+1H7CcG7G%L?(Row#5xiQ)Vr-ONW+{koJlT zSGcpwkTN($D9G00%g4+Ig$eS6AbHCV)?qQtdNAdpOM`ehMvaW(>#!!sQT}C+)nDfk z$4iPb#_4kZt9kd4nU#X#=bt9&yQ3zs*P;LsM0q@PXeceB8P6g&y)!W>ZT_0AsvY}G zqP#^(A*4uX)Pk4pS}{sg_QGJ-{0LdH0SNJA&8w=|I>-9d^3PNB!%-u-1p<=-5$`zh zc;B;qp#A&#TQR3vj|9%g^Ay~~BRJ7gD3 zQFHR?r-s2T%+SBD>)NZF;xr zeMQ-?Lgq!WR;rGwY|RsbT}MzJJ&O2y6`hyABu_)5iIH0CzZd{0R45Sy&&nk=plsfuqvDlQ@lOZvFbjm)GVD zuThNYKZy)1qrN%~O?|1K)jxrpiI7VH=smvVKf{|9G2fra&wqxZ{)_cAzt7!1o?Z^^ z?<;da$g?TA0E7IB0#T-o-?HJAycF!3hJ=+3nUWu{U%l)g+pq}!SjNR9nu?>vIONN_ z$eWT^&~dF3+}>=5D2F~Fo~k=*4Q~ef2$4qM^SSe*CtdAhpF&m{sR^|D+(q~`9`wDp}<#$-nM8@ymZ>3v@ z^$wFW2l`8tpN_NcI?*X$`f)0oOQy!E@hk277E z?-jM)AT){zIR7wy?WzK_j+GDjY1wxAcptKN#(d~58Tp>=mGN_d*%k$x{ZCXjbH>WL zCSH(9uzC0NFL9!!kdL_Co_h00*VC|gZ6Vkj6#msaR0Y2~hY7;votppMZf59Q%Qm@@ z{Qw=!iW$#>{AQLluc_*_SQ5M1QCfI-&3&@E4<=e+{}j*X{EA3w<`~wLyRStjZq&7Y zEA4`Y<@EN|8-KemNs&$c1NIR%4|<5tdeHMr3x7CW8a%7|eeg6nFDJLxX{+Z`Y+2yt z0MRB}0GC)?b`fK@63cpgZUh3i3r#0aO_j|xU4baDeTQPG^Ht{;l+@}>PDT=wq9o$6 
zrY2VMBl1WQPP*U0W-pcS-!zR7Mkaud>&7zrZ*U`e;JTKAbwZi;`Sd6ac9n*jU1ZrC zDA-eWTAUPBx~mK&y7;e`P=Jb-wH>Vy?|!*QoGROJTc!E>u;tUMFuCohWAz-m1RzS^ z+NS3d3DhgKlb5oNGTEb`gqGA)MOBem0gLXW;E#Xf)LONh`ho zRtr5ZP)J+{VM`>Dw&|vbj$$w?-AjYwYAuU`qVRS6`!_@j0OjX3|81%;F(iUvKKK$- zf!X;BKPw78r|vsm0p(Fy5f?|DsFKajRt^-i`NI;in6de$eE-wEnbUQD;x2(lb;-_p zZ4{KDSBoSy{^8+SJscl8%}efx=;$=GPqkHoNW{0u(3um2dO|63<_EvIT9VzV(m(3H zx3~Ut^{kI}5g@;{KXzgnghOOn-&{G%@4jYU@&ID3^W|krj zx+6(pMmF51O{~9l2^o?trVau=MY>D%j+7ARHAf$I( zp55|$^DL3qU6J0vluFMcnfp_)RyGPa&{%ep$*sRF^F{ZutDaJ5c60P_^B@bWzW#qY zWX#tiy~?JWtif&4gD0c<-O`qMX7g&4lS&#V7;18wAzePaDMlm0_}u#R&Fa+X(+)w~ z!I9^CgB2lwePz>?b|dzhhKZk6%$-T_m}kKy@Sk2D9Yd0^o-2^8flJ=tOTc zgMk9~U*DMz*rMwH2Vrj=6xX(H@h;pU!JPzxySoGp?ykXIf;$8Y4#C|axVr~;ZybWV zyT9K1?EB7n_uQ&iwf>-*)wNnye{+uC7;}Cx*)jimj=#|Ne_a9mF9bw{zs4c(Gydbj zf1e4E1ZOKSRfl9A>O6%ICsy3h+y8TVVA__PK#@XKa>q;GK++^as<7`LXMwAI{>t{b zH;;))B9+j8U2VfPjyI0FE_DIN)#m;It@HN0M6l&3`faxTS`YZ6@mTzJ`9$gJd&ug0 z+)Ac3CN|N=!{+sY(-UKjgL#%SM-cP&8jJxOVV#nsVp4hNk{*pmb5UdOKC8yF6eJ*VB638xp5kby%67LmeA$N1MD8 zlJ@78{^uKCQ=@YtB>>=3<>SDQ(_NEI1YUv$GB(A72EyV|?{x7yFG-qJ(hYI7!zXIJ z3MP-C0?lPLr>uWb$u7Rn)PVx665ACuI=5zY2x701tyamY22`Ab=4Gyl!iMTxn(g7@ zT-pjNl3{OQ1s|9t|B0-WZtHb5VcrVltM%KRd&X3NQQ>KnNTfcOy=XKqPQG=T{dnVmkHhUzTm(@|BleC zx2H<_;4p|%ibkAYNOBwWu)r@zwRy*wTLm5f5N-wzPrjm(h>Nm-EE59vvxNmyJV2;{t?U=4^bGIt z7soy2C!?Y9@0m;&|qe{Zi8bM~0)mVpJ7QF|rBMOGrZxo7-2b!tplrh{uRs3|(p@6|6SR=5HKx}2P+5_PSN{Kp?GEFMMH;v%u7$EM} zkok0_w7m+yw8gxR5os51{JBV;-MukwnDdJ^xp?bOB?1uhnC?45&*MTx0BEx@Y@#iK z+Gaon_d(iSMdQ?xfB?GUq)#OC!EaEO+1qsB$~jSw&1BlBr}(uncw}+kg?Vr4I=j}S zJR@%8M$Td@{?zL4a!`_YEA!E{sl!Ayyc8}!Bd^CU#Ax|jiNZ-W4M=!u)($eqji%Is z2iQiR($DlfI__+9d=P|cw(cHcg;$MWr8bQR5q~M-EdoU=nw2-?^&7^;6KxEs)_K>? 
zuW#7+q>P`SQ1XH?E!m&^ZLOu|>VlgbYzaPslVQy0c>#iOz6$NEi$`@g9g^+dop~k0 z6(o9B=ld63(8EFg^z@}}tQ&aOq_ZpjLzXhBLIQ{(EUo;?DB0tYsY;GIpOyKRRYa?v z4c$I1b{4yS}rp}%--(xLN_y{_ID0+SC8}E|t-}!mE$|DEYa~Gi*Fg*~_ zjjNr;X;{Qa^0vz1p5Iz2wgmE?6tjGT2H0bweaP7B?d^+k7c&Yv);EvN8wkaxm!2bv z>9wCbLQ+@Y?g`-8nS|5hJr)%1G^CG2GFv36Yd9k<03iSH(nFD3d%ebT=qH1oJBz>^ zZ{5#FEjG@LbEikG&cP9FWtM;#ty?+-=^k&NH*8+oG##s#SJckVtqRZ{7#Y}6JSxc? zIJ>LezeKli$@4uwUu%gh8sN@tQ<8(a>K))Baf4D@LkTHd%5oVAXd znKE+}!^w$ed&tDXT|Cy?xpTdLxs#O$tfT3`luM;Y$+~>v)lP@tzEq;)bWTyQ_G56O z$kkT85{SP0m~}_p{UPZjW4fPub^h<#sxmTG-iD4hv(eHY(muwBe=|2V<+wGJz-eC> zRv-GMMvy61N?PzK8r)alRK3{kGI-pMPvPuX*>K>yzaz#5RPkuZfnU%__z_a!8TXhs zJdbK9W@#c>vihdpCY6R?iTiRJ?Ee z(ITDBxnzZ!SJ3Z6w-%Zva)M(`OC59M#PU&;gA?Qw|97jRWf%X6=pboN7m=7sc?;0TbBYkJ{(V9v_fiQqEYSQQ_Lv4|dM9r}t$`VQ~4 zEc9})23(G`sdvb0?c>F8e-WqI`=}x7EK5nrr0F*@u>gf*l2U)gdl`jgwchk;~U&`DN41D*IA~}g(kPT zEQ1s9iXEhwwV)RzmK&F4)Tu1vO~q+n;B%O^{6bz!ZJx z&UE^u>?FG@2oUY}`ohFrlcK7uDzx>y0(`d2Sv-eaO2=R29x_J@ zdrKVS{}r10JvB6C2YDN!YNKo#nyKPLlY_lo36^M4xHj5a(9u7AHGT@DM7IC@(mpj= zw=8-A?sZn<2^HX3>5XG@p<<$0a4>nTh6mw?3e**H99&lxk|a{qj*2?-(9xlk${C0k z741Mk(FzA)5|o#e^LV~$easXvt5zuhMqmqH#qgl7;~~Q&dQLDzj!eB-4Ha+=nD%Xy za;4)Fsqq1OeAD~QFrJmLkA{XtTY?;eLZ0mSDg^{GOvsp31RucKOw2S>Iy44R=m>yM z#e~7V8k|Kfi8h)eoDJKg75c#`!wCnYfn8Gqm%X4*qmbmLeA7rS*>9Y><|jnimUs zg-Y_Bb%PCq^n|Qr8lhKbaNZ=9mB*MV_*h3LK9DkrPA(($lhSm3T%oj77w?o@UUADb zbvXMY1RP)Nf>!5M#kjd0sD2XJvzOP`y!T_D+7MB$?s*<)qO&b2#Vg@lS1r7-D9WRVFX3M^_uTD%y6e0lVBjVS>tt_?D z{}PO1@}$R#MWn113g;_F3{9^&J-*?N4>MA&HZ~PUsDkj z4q0F(>Xrf=E)c>`g>ynTTg$D%#vG&Sxzqhf#LOSH_~M#;J#*!3%esQF>8QPd`iR{7 z!q+)%EG+XDuUF^qUxic)?Tn|EOm--Q9XSjD`5Y!)leuL~7*?4o7yl3YRD%~1HqP2< zd=pp0k4_z4-n<3|cw(A7FH_}IkbSTtJcb8r)E4P1-m-tquY~7j2{8|Y;1FliZpyRF zD}2Wd+hRl2W^OjjZ*lxT^>g0&ZNQJ%H}__md(?mD*Eos)xjbX($9W3{-U&Qjaq*pW z`K%vk&iTUrPUATCGKI_dkh%NiVAitiitDY6YD}h-4X8Y76G-6QcFL%{2HvM^F-|?y z7pKDsDX>hKA|}kgkaa8zyHae)Q{h)yWgIM=x;3Pv9(<$`^QUB_A4pYKRU1yQJ^EZ| zTbjfF-cxDHVv@IJDvvCOaR~Tt0ZIV)`6O{(F3w_WZaKFxFh8jg{BhQubpAYJ0TkYM 
z`p`c}Y>jeCNa#rpUyXgQO%v^d_@gi_|HXsR4t>Ou-+)bg_PCI4_p?3?xqzecL}0$W z=4g!M>7W&%A7|3NFm}7qj+IZu&gJ%fE5^_qxHDVyj*BW9-a?9PLAxvo%OT_!7XZz^oG=tUBLbHM>pmmFD$?^rb8cdND%*`{l52YK|6P z2kX{to?O_xI3h{KN4|D{^UXKZ_%RM*KM9v;j6yq``7nM_gpc3RH-20ozd%@;cR>Pe zL?a6~<~(69vj`w%IC32OFU)Lp>w?wo44!LGBNqU~x)N5&oZ=GrM3^!njb}I?OXu8M zeXT3#UNUngiXoGahRZ5M#yO6fk8Hk4?FS5EI*5?VQxG=Ey4Tkx7k~0s`C9y^e4(`1^&M{dG9)=xz4@*NE92ujb*^8WlQbRC z!J6_=5h9GPltHLdVfVN#~SOmW18|W{amp2=>g8vqE zD6_0uscD>j#ukbAi_8Uz?Vz@mmSrFA`C;4PmCfi?Ua7_IQw z-nB-X#US1v?+j(V@HDjayhylm!x~+-`pa;=Ou26|@SHf&M}N{orWpN!&%Wp~pCXbF zO^O@Mb+YEREdxOHU2Cp*LSO16;Qes;O9$H@H9_qd{Y&pc{|<@dRS@`la0ClruZ0hv zKmH@C{h@>YQMu6QnCSlqXup3y`0q0zY~LIa@uHqq)=!`Kvl=TaEBEx}BYe*V)b{w& zVhnNf7HJC58#S5#b;-bWll}hP*08sWHtP3jO>=usag=XQ4L81ru3D#W{AWe;{56)V zMU>3EkFPr^jAdt!of)#y-_-{(F#b{Lwj_w8_*$k4JkKBkWl+2CteNFRgaHFz{|wk-oX5qE8ilw6;4vP z9wGjE!RTvC12-QV6Bic~mm72HmTk3dcwmc>*S@t<&RHFCqyO!!+uNT%UEo9GTVICS zE^1B85&G6&BGTH{^}%gB>CK(J_9EK)_&CxcqT%giTptBmpZ_(0T#Z#NK`CV7tE%<3 z`C&^X>4>fVi$mR)$2GRkaW6259X!6QYuJeF+*-FfI3fu(lNW_RkhhyZ{D(_hb&h{+i{NiF_!sufp7w;-*Dh&G4%|?NuOsn zqhoPKU_5aBNykF!Wi;@QXeoYF~OaZRj^wsajy{;db zb(5A&Y`Uw!^q#BB%?SM2v{%&$waASV-9(iUDqJUbh=@?EFv zT)n@3APO*?vD8l6n**C#S$!^}ru<_nc<&8VQ<@%MVY=~IYf)ei*Ti{+;Q_d{&b3kx5_9R6FXti$4Zf?WReehKJwbFh%Y|)O!17 zOo~91hY1o+=yPn%;gK7<5~(YL$LierS4TNzNCREA``jPkK1a(Sa%VeP+Wwu~Gr1qA z04~7_G|=V5$+&0%{eOz;?7BMD>e!?z@957g0KhixO^)tOR|ILq zqiNKi2g3Ra~U z!kHub5m&X`>$vE1>@{eKCf`SKJJw`Df46IU*a6<0~xbFF`uvAo^#%_qgytJ}2YJK)EwwwJ5P4K0*p7gb>$ zDwZ`mY>V1!k-!Pa9|-W}}a&Vu3qAl)~3iu~wPTiJ8w-JfgsqnCKT z5VbqoHGew2d}9Mdexw_GtKsE%F>bEA#7-@ByBqTSifT7wLEo$$&kYCx8kT0O(V(ly zi$N`sm%JNO37%SX0Ps1!bh&>iUD^ z8J-vA9>rx7-yQ>H@Hh_?BJ2JH%Ipz`ztvb1?uu2@)=I0 zw(k*J5)}QKhn3c>udOWH@^Wr41kUnq5~pA$wEOJpz_-(MOM-3Gt*G_40Mo^e-t`SD zpM>$#sKVu84!Bwnr_6AwjL+j;cn3?I!(}4L9RA7*#k|R);-IhAsAqWA`@2+GE@^dh zxe(mNoMe~A$EBAwK}VL1DYt4IDezt7-1Eh5q_`+m>os=hSn5yS*qDUo!HnjO>(>oB ze<6)ewx{A4vCKpjR$dFs&$){F;9k_6nL3&e+12{`2xxk;5?g|4??OzxZ!3g9Pknyd 
z64YpUvRXGl=2WM|qLxxKS=4$*jz85mRzvEg}R{&36-Bsm|u#U~B zW2m>nU1~Lgri=juf5dFapA|DX`H)C?<aJV{5MaM%dsyGjIN9oAVd!1hJ8LZ+kLFs|oYYq3{orBLVEDXo3NCd?IP*9%>yD{U z?+kjKc!N5GJ-j;})%4Ns3VT-2fFIri=6|sIl+_w95kHwr4O;Qr&RY=@+XNNj9rM)Nw!Vd&)>Y2{kIJ z$7yxhno&J;DYFZLZNQNs=vEbn@e}15DsmQrglmzbbP;Uhx3OY%RUs8Z2mmql5JXoj zD8NsVgiAhr*q<`4=oFJv)$Ins3=xd3lJE&^d1{;qEmoqX=wXSTxhflMqNMjaKue+|j^ z#vOfw2|>km`dq{~v4rXRyu>J%4M{V7l39YZloY&=AsA?!Kg08%Apo4eLICv)DWXp1JWdJJ zfG(r&Sc19`45H(+Hqwttbdb9P2VriEOp%29hHp*;mFi5)ekwq4wMi(JH#g^OY zE*y6Ymsa^+Me&R5nfKPZSEl~-{l2WqXD4WIg4fxvEg%V-`T?89cqOgkc;})C@7&(C zWg&MMm9QwrDm+~WHt)sXzkVQ2sbFbtEHMNUa7JY=6Gbs_&e>@io{DstQ8J3}A?Q?b z_n#pJljO5X)9)r&P#fAD6jM-0tX%Xza#pOG3!|x2?m5?3I_n;W4SDq$`wZFoIIy*` z+X#L&b2!)?EOVAFb$6G0G-kL{AZ_C|Ik7C^)y?s%t+G7=Vn6;6`mjP+pA>!3qH?`` zelA@s{N~ht>ISfe^7~rltjrflfhvB^>K9yi(iYsC?k>RTx}$Tx@UbcnywRoTW-ErFxFC z^>I~AcG?d!!2-btl{BP*;ExO6DNnVTh+qwSv~K`&g_c(J6}8>4S}&0`E1@Q9+|f>J z;n4P-I6`7O%hiB_(vAprKJ5IQdI3drpX0m~dHmNH2-g0-+eF(a1~9m-A#`ftbi*It zybewm(7zo!6bH*uRV7D-UpT~63U;7IpWNnG?(piu9f1mLv=J7&EN+M0P2cBpGr^}2 zl1#UUx2HSb`#b#(_k+U;xPHC2#e$(O+2?6S2#l{?WFxi!u{7@t3?5sEnhz$d00}QE z;j5MA7GSQc%TUVFL)X;hJFeI=e$=7~2V~4R>xEc_e|n!FcE6_6fwIM+s^?A*^5>Fe z{brj1#!b4J>PscD4Ic*YGZ9v^uzA_D*&hy6#&sk&F754QA+%=Pb+bH4Sd}0ga5)co z(8N}Mv8p+8q^+c>!uW}EdmDIdpY_oswlkU&_GZ;7WDNCsAOlOoD&Hc<-}WEtA?E$5 zrDhQ`xaR^KTGRFQs4wUoLCWGLB6RgiP+IEqeLbs2)e z=q@z}Onuf{IH(B(hLL-{TrP1oMn`e<2+<)v7+UG6n#Dr^7ZMqT;OIgv!aEC*%Dh+p zqgfGKM(q>IZ`G@L-R(az|CBuuWI+8&ubZY(sGM&;i|mbp#rWAhdxFUzd67CMX>_q* z@APney)M8}2Lm{cN7CP}9h~V@X;4s5t`kE7k1BDltq7!TM|iH*>R!js`4p@xFgY7` zfi#B>NCa>h6;|K+jCAw-UwA%oT?ul`s$M~JEgJJ0hVl{NztSm?8XdcE-X>P#0f1*! 
zhkqzJD)InKwy^j}H^tt8RQ(7T86SkG?`4t5sXv?_qPNWD3W-t3>}DDtD+odR{cYuy zZU|FVF8?0O8HLN3=G-FN!?2$GFaQ2g$Ogr1y&NtNC^$t$d^_Po;thM^eY^q9^w_i;=TPUYj1Fr%Y z(TRdF3K&|u8cfWSD^hDwLiaW~%)AeblDjF`dk?(jL%HYgtx)}i?x}=1Io4I5qxWP( z7kW>)k9=P)uh48)kim-vdX4kTb1sOZ2Ts1dy$;WPlM6#dTP0=}*mRrOaQ&?r#;Kok zBVTAJPt)&fe+fmBiUXj-7i537i(!4rJ!oowt@3X<*nA+`B?Eyu)A`iSICKzpQFtL< zstGRCp4APP4dejcwigF6KHkAo5Eottzol2NBwkZ`>MD9Tm|qHln$KDfqv)@9c_24> z`i){YNP2ey=Uz2PA*}mTrNxW+RhD&Zu&77%!*)V?*CIR$+2Q2<0b@Bff z#o)Y1ZSCu|p6$HF8C{B2)(F55oa=bukuyE5lluo(bxN;1$ZQz-3f~a#ydFs{b&+C( z2;5OW?ya-)T`;*1v-}Hi{XW@3g$o+-MMLGYDimx$`J;t7I9YYZxZsRN)wrKO9q!-l z-wi%;@)py4w`Mn`N)Z`w+PT=8H*V99T~X*Atb`JK3Vhz?O4c-*~=+F&7 z0NbcAIIGO(4WT(5Lv+4m17%;LZqNqQCKg@bbnfzGJuTaO?d=V~2VL`~9Ic~-IR@n; zwr%Tw;-^+JMuSfN_MV}k#t@2zjU{rY{((nfi?hvQC;ljzCSR&H&9v7tlPDAvl0o|W zRVAXLQ(O|BG?jhr;=Rah0oIG0>Al7zULAqgM}4Nj#`nR#@Yw_Cm%L|v^lCk+3392z zy1uqt-Z_^lLG$^B6NULE8WSvqwNWj5bY&_V?>a_Y=jC)C`|UL}0x4!GQ0$O`QVHoj zZBO{6oCj;p$k8PBB6u54_7+Fu@i5&c z-18$H=~AMCeIwi6=D=<+7K-VGi^T`y-#C=DwG_CA)_Hc?1x#%dAg`?Kr|Fzir#Z9f z1lfxPkW7W7h(o|ZMBXV(X7QpUG4I z1_$Kj@>N32$}`G2JZAH8Bt)WdSF$YY)}H@R;0596*jJatE2KnetFXCi)4jS=pMhe` znL12b7Z*#u&=0z}zqB@>Qt~%0mjR+p)Y|CqBF|Z+ZG=&+{s@$F;O7 zK7LT)u-1x2CXxDWp}s|o`jEdpKG@4iHR00ny>cImq72}!Bu;pJB!2W?shS## zX84(_`Ww@jB)P+AZ(`okpkiM-;j_BiQ33%b@y4h`Wovwit1kW@@sQb)G)Ml~YFPcd zQA++!N77AZf?9T|Qkam!;&eyH+HyOOaIIBxhL28U<)Haq5l`|{-kWkW-&v?}(_GmQYWgJM)U|Hs^PdQEdB1Ol#*`6>?P{j!YM_ zI{InEAKDMIIAGP8F4Pe6fxaO1r{CCDusEezu z^!<8?ykAS#jke4q!lP!B<%#e}I#}3dNpJBHCQ&f`x5I~97n99hlVHA{lFv8C>sII*Y<7q0*XBBG2D_IOE$unpVqWDo z*!6trAn8UuO%PEr(5$;ry_N$m(SI6?q@_>24)-;x?5NI9MXi-NUT+o5!__*tE$7|- z+{D)zecL<%TQ7eW6csZiWjq{*)CYnqkx_17g5||}6J~Dc@IE>`R?=r^NG#rIn3N8Tw9J zC!EWR6PCAVH4hM8e8ojh+|X#nP-rb#4^GGcS`ijRR|djeWbdwf9w*Tufjk3TcJ%^3 z8@nI!C<6&jcgjoOXS+{rwE7+Mh0&8>^1HPMCF0#(O_oas#NQAClj_E~t7P3BIXSdF zTM|mm2a=)#={MS^En9>b`HM3g8|XA{MO$G?S4e&>)I1IXOtUzI+=-)exw*)hC2>op~Pb5PUcQyYyv)i&HWu;B{K@AIv(<5e#5=6-iPi=DnL4Uz@|LW%6U zj0Dg2C0^0u`b;%55CG4SwHs)77|7mpk?r>uzsnPNLyWfiK=LA9Vz`j=e+0JJ=M|-B 
zKcwkoows|f(N&)yK#;`d>j8)%~${dKNUc+#?b@+iO4*Jdwo z88v=4XRVIyOD}NlZ2f~>c&|FxhEQUz;SK0mKR!D5%&OieAgvx?2dAbNIPPC)@yo#* zfL}cE{smNhd!B2WZr#}GR2yw78|BEes5pdhcrd2QOWd;to?<4R;tgUTfd3tx%Qz(h zPJKLq8EESW(u8<>x*V52vShX&XIUp$^+RR%oNc z=?%E~5kpUjam)Ldh+$4RrhTGfHZv=?Cg&lKq#SzFesS{s1*lDh2tIqGD{0N=o!rbV;%w3tHSMO{YzxF3Nh7mE|gNkqYX`AHp4Rn9z4}R?Qlh?Zf|pyLabq_rPU?$uF!n zYwT({O7M%c-yG9gKL#H6RQJcqxSrzkcm>UbcA>FL9b|wG}T5j5F)XVr6$io_ULE5Z3RER zyB0*O37N*tcn0Sg113`sDqx_s0RB3`cV4P>&V%Ws0QD|pRHD2aKjXK{#AJ=J8tMPV z^KpkgP}xlL7{!p(k5Wl)-ANp;#*ObMXBkO$n^qsRE`-4;8OaNuVK2~+?BnMem5AeU zrFFXtXE$wBGK-Uypg%~dsbD`aDF*w$l7dp5$!1$s*>$8{lW5p01cW21zlZ`#5W7UN z%bY&U5J$Vl^2NS`R}G=p_)?jMfTwZvo|YDJ5c!R`=&5kPgpd6t@3?MBEYW0!2zD8QmH;Bg zdJGl|6_yGv5){80 zq+)-*ow_~Yn86CeFGx`Y`<6pWMZ+02S5WZiWj8T>Oe3AxwsP-7aXNYw0jA+A^_;3Q zbI$+mJ7lh^QN-PSyUpvzQ{2xTdx>`Pu;Xi`#`gAe^j_q_@>)-mbY*I=Ta@v}L}Y&U zaeevBs{Gl18mAcVL#c(L^eXGv*i+do)YN1Sav_A@X0+xnxS7RGZy-HgFE820m|Fj6 z7F1AMGj~xF`x8PuvLT6H#Z25X{f=ID5nI+xHcCAb>?f3iXr(-QR64D!SbrKs_K$K6Cx1r9=&`VF4ibj+%DO^yyIn6!3S&B zNhI}EVhd*stHz17EF$_E4OUKkyR@Q^p8{G4rKV0xxJPY%X8rm7h}3Y|NmqGsuZjvWkF}v@Bk^r z$TBRryLU<1SIpT~Re;{p!0TC6G3RrubThHQkk(>k_{-P)UMTg^HtvZFMZwl*N=?drh>ptyx;%BE~$rGq(6UjeXUxHw7*6l8$D|_*LfZ4G=o%B$NGtVtJ^Ls^&q3|tOlsf{cT<7FkEwo7lh3usT>7kb^T8c)FuKK6BZZM2+pv-Fq*^!w^_8d za>mf###R-J%Eev_-!%DszK04C7c@X4I}u^XLi5oO+Xg0RL~H_CywXOPA{w?2wDzm6 zPdKAp1$C=8udl+C@D^6bjs%m>|k_ zS9P{pZ1?YOuf>n)dpv%++EeUzv>fj&5z^hENIRG{d?>1wsjm6*;g@6WfQIC4WSTLS}=f1oV4-JjS(b zVwH)PaCm)i!6bNvVv4)`^yRXaKHe;^C-0t3@=i#9~o0u1VA_NWp*+GhWP<7L{@KEZeFKL{~r3LLF^Q5*x8Yq9id$k4E6M&+8)h zoDr{Wp<}cSmmw4yTXuge=@!~c;D{j^GDj!pPj~DH!3yc-Qs5)V6`3(j0vl6VtO@kT z+$IK)GXv6W^p;Y29JEH?bk9aVP#uWmXrv9b_l=KPmNl?oYh`X_>a$2d*iqcNR|%Tr zFpoZHa#p&Eo86!m{m_@JWo$Tq@5hlWANaLUemG*LN?Z{y3}I4A`m?LZ?dQN6O^cL- zD{3l(axTotyiuos_v8WMNd(^tu?!ZAZ&Klhz7#oA9dZA&bk(YD!Z^iBe)s7nGmBcv zjN;+C_mizH*KmA_zCCU54~s&~eptL@YUm^u_8df84ysw6+RZp>hl1a2X0A3zNO(W~wUw5qN0K~8K{bp&@yz47WLGz(`1G;E 
z`fc?4Ve0-rf|^6S4diHSAASGJ`l7+Q#GbSuCYH?SiM(}Zj{7n*e^jgl$`H5|3c418GqLflcv0jS(Gux-|&o2y-sc75=kkbJ>RC8jJK1cILrtCoKz`om7>~L zu3S^mu#r~ctl}M;i-ug5s(b|T2*G6K-jjUqCGN8rOfvDC_r?n=Gy3sJi-g`Gk3Xbf zPb7{OZ%4*a-+4(JS47u zX%u#8DiC&!SBSBMS+l0Vu?$s|xogj*2!|Saa2F=AMs3FC4axA@9hoKn$h|k-7k&O5 z-o~5M2}hk=t%k9eaG73`+-hDddIrw)>~2=d6q;yniMKk~vNuc?YI`n>f<#Y`Wxnc; zoXYLnv1vYScu3tuwU8%<@uteFY;2FtDh5MfpQ<92B3)KOt5ctrNF7c|Rft^*QE6kq;^*djRw{Xi zen@h|VOOv1Q6e>fi(>znRyM#Q;BsJX6i%c#E7-|>dyNz-QYqI4;(r`IGHQ*+d_9%% z=Fz7%=qsyE>POq&PMQ0ZF%+5=JYxg=6V~oquXii{PAlG-{8b`mQ`Jldr4mCqZD$jo zEAS?z6m`YY#G;-`h!sLgw@{~5tSi8w+JZ$kyk&{S8{rYN!0@Fh!hw{I)^b>PX4+iU zW)9_ZVZWkLmw^G|nQk8P=ChTQ(KOP)r>Ifz^Y#=#A zp@cSwHP0Fsd}PP~CLF}85nuApdl{z{9(NY$&*G%|4$Qy)UeoMj^E|s$rT6-qW{W!< zsc;|qPoc$#2i6WnmI>LuMoQ0y59Sq%5BNQOdbzUlOES*nsx#siG?nZNuX|jN+*2 zc9GI)GVXd-4i9GBV-#3@$srwvTgL986avwe5*{1QzC7VT6V+^3jV0Z|ieUl|nSSED z8m0I&gTh@OUaAxBf7XpZw&+SZn_v+G_K!DR=l(0BxRICeT)H9Ou8>&e0z6G=W|3@W zj7;&A=a~}TP8I*lz&)g-#6vlcd+P@A8qnc7axTfeetNzki+g)!-8j+L(tqTfpZA$P zKk>C_;?nqfv(9c&$s;qa;^fQID69wyjeCl_=Pz{!gz^u{)SA?%AnN3pUnL`9Wz#%; z34gxlQl;DH(SMHhzqdtNO^Fc9F>kAIE;MT0S~LxV1QJXrh4f{l441w83pGz8p4$|1PttCgMqYo1Z(A?P{DSAt9_!vxfWoV(;MD?59#1YNXWT zVnnW*++`wLlyzM?^Y$BGrp>%RD1j1#I(q6dk0z1&_3gOb(+xvCaliGj;HQ+*&`)~7 zs)!Q~6vx#QnRMyXg>;)^S4x>?L=zGMQ6&O(7GH{G=09_0S+M&R1$bB>``IZjJnrKjYE+`Ky#_ zt6F*c?UL15y{qcG9(#cfofzw%8Y%CT4kCDAdr-(PrI_@Pg(O3H^yTveEz=4=0YW`g z3+OrppK!v3*#%!1$h8uOMXQC>0OL}s@uYl3D=f@Bs&Sl$f~hV~88%b4Ec^tXeC{`H zyNTN&{V^VHG%?{3av(+xwZ^3Blm!9c3J8eYD+l6xPkwxIj~Tm+Ad?ksoR<m|=<*$c_mUrrx5+ zq+|<}sZngK>aDR~ffYiUQqLXo92!#y?dq%W&pKy8@S`>)XT0PRI202e3iSOYkl#z=PMe zLFHZ2!OhLIqmL;jDWoqley&Q0)?77Am50U=eG@`Dm;eek=(TSWmrx*+?)lT)~aO_>U3^gEc z{@AxLo!=l99_XavO8WRK|KI}veDU|DrP3K6vL(GnSzNjVYAwDKaYca0mU?NCd0nh1 zNg;)$lwPY2Ag9Xa7kvSMw8D|7_kvg+aWv{H8^d&VJo)dNQG zi9g9X*?iw4E4YKbwrZVfbnD>$?AOybo>JekZd;hsm^mC{3kf^YrIBS1eNxqTf zt14YqjyCYlzYyx^`ztrwxI>NdahH<;LBt_Mz$&eeDZmxlFe`af`nkLF5?U1bYQI*W z;$l?yUdeZUa-M+fAPYIHXQS~#og?h?Ybv)x7L=c%W4Hz25MVQ=NgBuSS-W4`rMnd 
z!pHZHX8|i8;$$8l6-z_OY9Z;qHLAL2^{i`*3iyz8C8a+tZ^5lHddCPg^;MgH(-6Xs zd8~Md*yrx1H<+p3{Ku)^+_mFdR{{KQbCdz|9xb=(C9(*ul7Qfz(EY1c} zr!yO$qn9VPt-f;`ns2ARq0zPK_IcUonaRuvEfQJC*Oqy7dIWWMedR;~M~gOU=zivW z(`$frf%RPSVu_C9+;0<z{(+>jFZJDB$hWwq( z;iW~Jf02Lk)8_d}7}3NIjRsKNh<_fh==$@OQ3MK9S+F)T@(MtKxB9y}wRUjLej<5E zR<;u;r*tW}dz7IEdV2Tx~s!0E^l)qWKP*|XNK zAzBg2g@&@VW!d*bnYJ0erjA>W!T!}lMH2v6(NL`wLbX!cwW7QAt=E255!Er`-0rZJ zE1#HgdN-Y#UmXMuJeQ!ZrVFK+#=~axeq)(M1;AI#mTj9ON@k>qV6bxKz#9HVNcUcs zlxEw&Ze);0@mTot1+9^;|Elo|)_i*pNppgkwS>aNFHYp3_wSfjB1zGKnBCPyLy@kh zXyjeCWclQuC>OI`Xa@u071g9vAIwRDQJ;ov1N0E@bdThfsNA^+JmSF{jJ_BB5)o4K zO8}6R=H401<6>S;@{eY!-YgVO9sUNi+<=mHx?Fta)2O6z7cIvvK+WM7&ihj$Bi|WkfX^|D3rveYE)9($+X-!eZ#)6DvD)W4+ z(f;A#Zt%8c!pWI3{p9!6aP4-_OMHfSbbxgkb0VhJzZS;nX^N~>QC8-?B#-M99k4aD zTvDr<02$M;cx?kXpU2%weT)AoZ9L;LQnPk-zfn{f$V-v<(C~h1^JfQ4JjMb?{wd{% zP1f(~8o^Mf)(wkw0?|d@;b`68F^^*(O8(GlJm~AGegkWKnl%se=wdI^7JD&VUgVG; zdEByOVw+u^7VOu3raFT2;^4>WHG2`U+8`*m5is>F6$g1MLF;D(~juZJ`y9NF~w*U3_ zmexHr2VS(30s;Y)ai^88xD=Zo9J4;uw|&M+|8t7sazK9x0epGqdwC2|h?QEX7q{Cr8#S_}6DHSq zRiFU*<#%oNPlvmn$Ts`B(r)Uy16w+x{jbcL?d*ZHpu^qmr)gO;v(iR0`(2;pn{()N zsFtFK)3XhpsM^)9>_ZB*5)o{FO2FlEF0=$~h&y2C<<*`@{*KU>rWe z<;M;>ncatyJCyS2))y(X`>QgTrw`e&`_6u~dzC5OCw^m-1UA$8BcI+63eDeo)-~=o ziXfvOM*=rkbOy=Ds)gOwHUvS_ z`NVht^R>9rW>oVI+fSOJux|?b$LB>57o(QqDVR0NeA;%W*3&R6R0A`6EW}ZrV(UAj zFSm~Y7ppHEt}NMLoV0*$^*N_djY*}y7g^grKlQQ6RS?D?{Wy_a%(U1~SQ#06hGuXd z_9q<8Ua_twQ=Gmn#Wb)lYL7?t4$yTLDG@_H;lbdvl;Fw0V|=`Ltve&oc; zEiX(X1iuWrW9Yr-YEmx?a*KCu1($k zOQ*X|2Xtz8ReD?_t(eXIG*$}PkE!)>SF5Q92h@Cw=ye2sGud!@2d%h}bJ0;?@~;*>j zgpfv+`mwZ!KC-&Bb7N(U4D_bS*1l@d-I;D})zIE7kYP(+dO%ICm$`<3w2g`i*BmvmHAh0GiF~z4Ia`Cy|DKlU^NlH<1@VY{I9SzK#gJ zYwv7_C2_KG((dVT+qoTAr(28|l$F$3C7AR2kwcqKOznEio|xY!%kIXNO9zZKvZ-t5 z@Z!SR)wKmtcxoS3r2^9HQr9$^g{I}4hgwzmQ%2|NX?oi4+-PE+LRMpC;xkA2b^7+y zsF-R`Zp`%g3C{?1IiKKUReERTuX_>1VLS?VE4y z)?tI!`+57xF5NSuZN|u$epnIa#zwB?N=^#jx2VA?9}>F6H<<6uPg$?U-;4l4r?Bp; z`zi)f>4&7!b7f`hhPW45J-f_}rK`K92S_%!)V=VKMZ*P?IUeSA3487m6(U&z>8O=A 
zekXkjzoLJaL!VFL?vaUQQF>MmD>C2GzC+ulP4|}!vR|f^eo%s-zcfNS)27?TxnBI2 zLOH_I<`uN6oPA9i8u#`gCjq|2pF~5|8`Y>s8BMd1I@fvo0fB@-eK7TY_X{|&Xihba zYp2#9C`GuO$5+*{A_&iqP0k({+h01j1`4HhS#YEAPIT7+P+mo~&yg!mjb{4qagdq6 zIRS*|QiEnGS#Hj4s3d%hnVRu~ejAPwhVRuzw2A&SYP_}I?WW%4E%k#q@g+gA5&?wzxrN5(-I;%yusZ3(QR(-_>x zmc9$%00T8S(@kKfxY>LVfVhJL@{-o6OGkIln8pQQ?9Mng)F+jv#??(NkzW&m!A8wi zrcUgw%Qdxf>yme-Zb?X0xbR>tE9(<&EyjSoylSRNQbJMu+(6Q=jOGsm2Cr>c=XnCX zHmBfcVwq-4j)Myx=K6*JIjBudK(yXAQ2Qrm4Kn1QesdsWAWQY1tFxO97L;{N6mugq zp6$n+Tg=xBx0uZO=5A1Ia+Qw04mjX@{=WCC|DK)sSJ=rcVM`H%sWf$zNJ5EH5CF;r ztOuLM%E-Y&5CSS5*IusRkc#Qq3Tu%{R&y zct$Ui>O_i~-}-wzBycZsetzUA?Irv>87L*`f5M)MmP3jx=Bi*~VIg91T0uj+)M!Nf zKHZnM9rrA3VWE;gG{t-kzV~bC7F(uul7A$aswpLUh>Ue81X8I{Ptv=TUcI2%x3|bS zpLQhuruV1p2(Jn9cMavdy1ceR)(&Mzt^}o_Cu>Oq>1r!UIj1=_9esA^8dN2OcV_8% z7%8~8tv!~hhMM{VP$CHP-cGJn#l$1ePA!l_TU}d$%dJyam%Xox6jTc74=H-AjpluV z1T;lS=5Q)TqeE-^P3KSx32E=IKDsPf&Yv_&6eq4flao@>zh?v0UV|!?3S_D+p$k`% zQ&f|tIk^qOsRI;|W8zA>Mxu^tM(2_s1929{C9KT?O^RGZB*vbdSXhJNy#-#dqz~2# z;z5P3P|1dH8tX)SO{0J{J@4{cKcH)i>nfUz(^a&P9+rx;VcT0zaDI}t+dyqA%WEoV z0)KDzIYcZildlR#gi&}it!yPa*uG%`QAS_i_yMu^kv&?9AD`Mn>;gv}w$s0AC~2$f z&qm-k?~T6x3f2#4Dr;zV3`lDH27@%TP#ZbB$ETyMrWtpY{@xVI3&xRB4f&vtc!SWo zH6k0LpU1`r2Z#Q9|#`#6Xb@0^rkFS~Zkpcj_rC?x{eQkwhFR_uwxFc;kJ^_c4 zrXUB8AUoJ=bvCvZB4b%f>ei|J2{98UYefPQgDVG*tGwIfJO5yy0RqH-2+o6x=LT;$ z^0}~^y#$J^Z<-X7NjbS3#+Us-f*vCjAP=)<*YFb@bcJa1 z25=!;hUb+!(pK+hBqx*++{`Q-f2|6e<``VA%y-IkC|lC!#`d7ecs_7J`YRO@i8x$% zI@Aj*cd%mPBPr~v)eRH_`z3Jbp)}ysACGSkUZ0W(Ee!79p=>>wdG9{Anff?SPM(DA zAd`;^Al#mq+NqIyZc8IYr&QGv%aX`xl)P;~msTZh4*HO-E=(97!e6ws*2 z*rTo8F`Kf0Y&J5?es`x|Hfa9(GvH4STOv~M!9c84gW6AYUek#u2WcN+P+Ygd-?yvv zxqRs~ec`fbfbgJ@oibq4pq$D~%WD-``@wadv9~9qv05NtzYlkGVxOZ!i=|6VE~5I{{{V z6IO)eJ3-)Pwt=34X2T}gJa>?$%F$lYYrDD8*w}WZ?)tSq

9`O!jq^vfY1nO0Os& zhzuvjnj9jfBz6a9^*p`uKP&)Qte+D}AmLZp^FS{__U_c2ENEjmF|_>8tKLN%^=d%6 z^g2=(7$j4FjriHue1RJwA(4Awh$|@xbtpSiVFxpLHl$!jbBU5?Q9@KKjM-2JpIRqy zN&^0=67YE8ybY2_ij9OWLJB}%v7@6}&zEX-!WSe12gG3p+K-A;UFMq?Wij9tjQIi& zASiJ;Qg0$(`e}d(>kXaKJXO>gU9g3@olONdciHmOj^UgKs*b}&B5d7V7ZE_nKRhH7 z`SmkG(qD&Rp;dGdR}VE=t8o&80UXebGZmd7Y$Khuy@foyFB zH|(H7yp|-wi4282{1;+MT20;j^e-Cz@7c_za;)RZeH4hk#3L6cShrILbfj3>osxKT z(gh<`e19PUa3qjjq+p>q2iW$i+dgiP@?Y%Ft7YMA!THX=ZJUq01q?qamt*pIq?J}s z0AYmnw+_`^JSh9oM}?Y6kSEI-G1#*elPnn;DFhbs=$!f;U@>reTWsc4MXT*+l8OO7}bEmiiw;LX-~V+-|KQcfZnq2;fvZ<&Ls97xa+` z7v_NH=*4;ZYJe!u1wFZ6WyyG;ccSsl5>x>-1-a$U$IOe^R2oq9eky53hfYo;qYo1Q z#9k02=?B=SUFoOfU|T{J%3B5oEG2vu#m50SXt#@!hSqNJEN3} z-E9&}yr0u#L;4Rrj=+%uq%oCa&=l3m^};7nqUW7GCy z%wx>L1;YOf2uN*E?#<1mHc3L$$6?VP1G^X~`{!hlQ=T}#EMDCa{(P*%*pi@~SLr*? z-<`2vtJPAmW+iuF_ngxuj9E2Y6;h@wIh*#ei0Sv~@T8Hm1F zC1tupBF##X|grJ^L^D^SP zlqTBd+!M4&#sESS=u*W}DJN3>QKHhII5(CXBp@J~pr2TL$G38)P|Sc05@wO29gfL% zk>ml)KLGRz=Zh1q-|Q|IIH1Z?LvciQGUhXss}r?A2^uom2E@}KBegoLhv2C{+gm`5 z2mSd45GOxunm@w%{JPgM2#c{SEYmx8xqNPM3GNS5^&sgZIc+sHOfU!O@+w2g#tzJ0 z<6;*LLJZ*+W(SG-0RkxvTF!;Ci6*gXECQNmY){3kX%zr{VL%XubBv42twsw<6~A~` z&&FBVfi4NYHvGr|A%;gHg40*U-U$VO6KRgleMfRKnT#qv&sd3*#nkZTJKZnx%a}v} z36GT-E|$J-PJ2p`U6DMUQ5swwCayL#VifucVT{4Em}6J;KJI#GOTou2aiKb7oLwgCC$=R=)d0K#&`>>A7S3z2<;#red7 zZDC(me;vdnRr|Q1oU;kz6dP0p+&NWA&V?ib$igigPXKU&MI%3<21sDoz;@Uq0i=-t z2pHq0tOR2es0I7jK9ogBqcGIg2AJj3p&!t$%nNQv>5b6?`j&p`gyH_tP_xlSd-8$R_p;w*LJ+=cJh!yMarT9%05Bwl{TP&UibUNALae(oI8g>n2c=3P&+eHp zRI`TWbch%z4+g)h5pQn^V%?!1F<}E{FfdOE={#RWpATh0lu1oWR`swld zfRwjRgjiaW7Y9*1Q9yX3Q>5$GhHUETEC;l^xHO%_o!%_xXV5M-GIcylD{JGQ6(-6s zia(i=56KR;4m@A#=K4mfZQX=k!}uR`XfmD$`6}e@Dl7Hjrt@!Sr?(xc{!S z&2}xC%_a-~&OG@rGQuejH|)PHovEpbf4V5R-dU->r3sa3{cXUL$SX^p^_E59tP!=Z z?UMD5s(0cs-a+I>n$S16N-$E7-st*)8$q)!{12&KIF8{mO{|9-qioO({)O75J@e=c z4@#bKN9Oy0LXu-w#3YIoEDx~0&~#KwJogIW;}UH2CuEbyLTSUn0{eV~tlcL32z%rh_~^elC3TT_VR>yd zYZ#6Zb3%<0Os$9$pbA{^MTF3vL?3_@+zBZyh;T!Gcf9Z4S=C1(=3I0g*>6MLtTz3n 
zB?#15^APW(6Jd}NEpWEt!%6TT_9jW!83nHYiB19Qp;&I-8G`V-z7(NR#KWR3h7PE# zSLNpp$^_WY@*F3gc4skJ55xWqz>YTAg41nb&@PpQ7o-UOforyu5i=T4e0$IfE%^** zDt(=LKB~`2wxBbS-e9Y7VqcKwDwZ(^Q7*N`MbJrZ?zTf zU1R?2_s=~}dK0XYY*L&(*BxMiNF-;eN3SL}NPzGAb?=4AMrpe>ZkOmnbvUe%gJPvs zzO!_omp3KF05W(X>4ey_#HdLE6Ad06snon56`LS9?llXrQx@V zUI%U(3@5{WTkhcj1SC=rqrFJs+zR(V4UGQ=$RrHY_4mM%X5A3~uIISWNW4#&t)2__ z=YpQh__IXHQq5&ksaj_G3Mx+a$5cr4kdh_OHG4O1sAD+AO(L=?V#a*=vL;TR23)Z} zu9X@+IK4XQM$T%wWEOv~Q0zTm27S@dki7Bo1SRL^!K`ZU1Nz2t# zWkom)qW+e|>aT^w_>A+HwO}0~u|}9CeEPw)1DY%qOvGO5<3lFr>jccCHBE!F@`^Ij z@sUx-@-3~rCaQkLSXQ)$r`9Zg*LngGlqz};qj!7ODjL4hx-dgLeJYEZxzAZOEaL4z zU9l36*CYo1^o^lmgD`$NLaz;j@3dI%ks&qefm!Hjr&g(G5XEbJg<4TVe4Qw;RWC=p zt-w_88zKmlH9{0|p=jYn$9wM2##x++ay2jPGyYK-93aw3hAvF?mZ1}X2!jKl@xQ2w zbLQ%~qeZcR@2O%&KP9o6Y008v7`Qqu)b8q`+-&1-8~*4c35S9y2M}UY5Up`sDP(n2 z7267UYiN^(&F7`34>&RrVuV|0^^?{E!oQsFGaFnPBDiPIy@DsV;>2QeG_R(SrqJVi8r}oaU_txZ-7nk{;08~Ed5gF)L)Ju81JFsToLpp z+^?XHYE&^GoPS9@{0JbzNz^Cdyw@b#29!%2N zXkf3*J&4%jkEKxi5KkMlHt7EWP;nj9bHX7G#Ok9F&e=`ENn!&BU_Q2LdU#OQ1z^}D zv<1WSB=*%Pkt_tKp?wl;Z9j)TDVLPtUUV<}$1;X!kkm1mHrGW<&1{>q(xBLjhQ+4fh34^ zDdZQZStu9QvePO=56gu7GQp|!HA>jC{{?!=i%>l0%3_Bg&W(uS<7UJVo;>}=N>Ygu z7#yL=OCQ-prJz6))Cmz&Qv}&AUGVP2gR2uj5?^U|+xYHZ)8mcAI=aFN3xHD2baHnr zqVA%c^^Sbd*p8cDX$@t5X4;+nBl;^Q*Q`+dcOgod&mPtJ5`VuX2pi;8DK*py;H=FO z6-84i4LE|J?h{d<^`j7N_X5<68{vSpUnzegE<+Y96)4l#bAJYzkbd6YX#X%ZUyQ2! 
z%s0%gcL@9uPI*I!0d-ebag7Pbo-|6`@6HGyBRlivjs7yP*sq0S=8v|DJuqDpY#DD* zNtqfCS82^S;iIeL4b@>muyaf6h?l*5a8I!Oi^7k2a6}zPFRU90^B3Mg5~Rd+G>_Ex z3^C}}yP4*mdrUF^Fw5XDq#thjV`#ei#d9q|Dnxo-7LI461RPuN{?7#E;SpDOQ489o zND#mgY1|Kmm{Mn1Ld~YTy)>i@L2R^=x~y_=aXl1?OR|Ljo6UNnN^f^9I{uhM+589< znHVq6MZ0fPpe@+2Dkhq|h?xKo?hX^oWOojjY8e4Bo}d2@qr(H^f1>GfBTm{StVsFm z7KXd)IDHNLoY$|6 zot2bXf)tTqYw*{9rztE2L#E>IdB6aSzr^AppXNepBQ;86k+(8cw;oDm#xTJA^fErf z07!>I6kiTF#vcfG%H~{97Bcgzzd-@MDq*&=c`~P2P!5#rh={=mu0JdU-kU>(ex$&b zvkOzg$faur_j$+-ROBHT*$R0oAh|NJ?;t_hoZhoh_U&~{aQ^Trw9xo; zka@6Em!OvKo3W&Lh~K#Y}g<_sn|2buZLT))9dYRybo6E-6sM8u=W z?AXCu9z0#gkw{%Isy+BKKe;jDVb^|e_V+#6K7;)DUGyca`vi#z5C|vyZOD@(@(3cX zIsQuwKil{GHc7e~_aX?_DuUm6npuDvD$1(X^cWx8BTGVs*82L^7CR>BGiSpW5WLRM z{@o21Kvcl3&-%H^Z~rThLGv%;`VADbje40r;F26H_+9fYkwsE>eC+cl0>FMN>67uA z|01Fu++!^3J~Z^zr0yFxrFU9*nvzef9HMg9yVoz(b-G6SHSgDXM&#?42_e@hPhd(ZV^QT#7Uu_n{|KMg^aSI)50F~1DhmV&%$9Vr8&Ea zHD#Ul^yZ$zClTy3f8cft((c^cw_Hv7<>O~lro?)c8ROp=o_0Poo}*+2a3{r*%jK#} z5;bCsKW{d3tH;9_IOg+-Xpj#JQsMx%r{azbd&`r?`=g{OiRG>UKwTw-;j6%kmV^Z9 z!$?(1%H()_yQXYGQXiI^e%rxS)j)9yqfOXShFd1;NO~V@<|8T^5ZggB7Lga2<%2T; z0n9paF5A6=hIRh-KgVutT;JxFU4aLa(+SiQU)92TLJYyg+Kz~-<_s%aD?eKv9+kkpPjwvaTbZrCw zGfZ!LDX_4X8_0Y-JNV2|J>pP3Lq{6bFxGXhaz#a0CriW?n(+`FXBBKcKU4dDV~xhX z`Z9<2YMWcUJ6@AqoSkOdQF%_=^QVf%JA60$dVGJrunk{>0l5;2)l`1B_ipJwJoXhq z+xufxCXMg%VJN(Z2u$B=_!@^-9z zT^nAcu-hFPFcY~QWvHb$5Xc(=0!bn>hu~g z#zooVuayU>#$GfAM(=4H8bWw6rzu)JX81~PPReA1?`iV{+2+lzEK!%L+xM9uS%S*4 z|Jnya0mK*8|CptGS;7ApM9BZ_rONyNv3>JzM=1@&YOL$}<6pY9R{hL=*B3hhM2Fe^ zy35c1TC#@!;moV(8GYn@2&j)>4NfQD&;BKLjnE zvNhRXg~3Z+p0@0wq`eEkK=J^%Q3jvP3+H}I}fd~Q46JSQkY20O@C zhBP!{T!1P2?dL6x3#ZO&nUIy@0RM(+pBM>N55&!GNB^qF9p|U3DqJV~j1JXysZo*s zlq9}vpUh*QrFrk4amhhyxQYDlYPGlt+p1^PbA z=}LfjSh<-;!U5)K|H&Mc7G@P}|ET>i=|PKje&YyFx29Aw``#b_cBOcUVz9`O4VLgApdl-5f@p88C0%yt_Z8)0#~ew&nF@UUW8r6=Ets{gK?SG^K-3 zI()vphd?0wty zZSWo)rFY1ka^%D@*y}5T6Kev#98G!i@v+X-fx|mHvU;t=4*}mOJ%Qf~-AO*|`uD`O zJRv!L+Z+z@#$QVLkK})^o+B(Wi1F+=hIyQ+kDIrWaDoqO8Ton9ZWa%;JHpT>Z=v`P 
z4_}I(qZ{cxQIg2CAdmeFd~K}NC$(2{yL>T<{~dZmpZMO=EwSmq@d|-lVKZ^|LUs2f z_UPxDaANks_4<0Xj)MTkDtYg}NJp5LAE8M5wy@De16j04x;}0D2aqiei{HJ@{5x(1 zgw)o`M6{kJx1LVrZSF>;Kd@#<;@YBaoxe1{xwS&h-|u$XVisb9ah~8VOd@mM?NhK~ z?$mtv{m?nJ^lpw_vufa0uAlMrt+dxb#|W%~nNd>}x$SLO%bS$jLoa8)DW%m;vk4VVNmUE~MF7(EH}S z!{Cmh<4O+8xn;jFTa5`6VuooGY;Z%G7>pZkPG@ucW{z%rOM^`GG?gas6a;S2@MN zSe@M3&NX`nb<3mRdVZ-U)24ew7wIIw6Kw?{$MUM<(w-AnM@DSztbX76Z7smH^nLZ< zK11?yxj*4tqc>x(bj?TvJX`2(16pQz5a2QY9Y`#AO|JO-<0?}h$=%XG>(UQz*W7^txd1rNV z!SK0>1wsw27#x$vI_+eIiZ6n2kMS_;382fS+%*wcqGcs1_qm;h`=P2bVv9Ca={R=HV0Uz>lC zg=TiuS<709=O3!|dm4U9nVnl}uXZvQ@}N59lEnjfbY5CJ_0O^#&4ET#n(!on96?suSO6tEuaqa5{BKS8w;Xp;P1x&`1W^x zl8x@wshI{oCrYyW;Kh^e4i4|LijiICR@Qg+?tgYpaMpPsfD=c-4immQv7O}r&Km4k zZ&Ag=h#$`6xkB^^x{eQvh$f^})SoNl3jj8H*RCn2ucfiBe#%MG?YExCDL0oVgsRVu zU14U46lgXSRoq88DYOLufFMIcSWtO2)^Rb-Dfy9N)8-w;7$TRs_)t2U)p2^4_XgCf zsQg(n^BDNJFQXO^`Pv=D0Z36!5i{G<9;H}ru1L8T%+0^|cE6nr%}SMfx2<}|tFTy~ zCFl`ssY_+OBO?l3R0dgDJngu(pIUmr6G0(Og*(XYykt2*3dTA19SPQm$&9g_WoBP2Q%cF3aox+AHzH7JRB1 z+d9m-KuWBO(xoxP9X@-$s9jtC?%G;CT~;NB4oi;@R8NX@ZUEntGcl0#`ZuP`>*|7p z#eJ1l2AN!*<|03uIFdtOwHCJkvl%m71FUnGX*D$IYk&q7N%ywfc>m>I%{haO0557{ z^0%ee-FZk8ToCXt-lP&kc`01tYuS!Jy@zHa@gneinN?*dlW!2+2HsZs5xmRS$z*p7 zVer;g!NkKt#N)KWN7$0X(MfN2U+*RBJEo_$&bSCbgzBl zZ0t@gowhtC;K=pLtbM@=pPIh9jH*ol@yatGcejaMW7@W62xlkVSX%qz;vW?Q{wq={ zz@LJv136O$Q^~qUUD%528!VoyEXeB*Eijm&lH`87hIu&K>I^K|6eVQGTdBROt*xN@ zwn~nwckWm0`jl=`9;B-Vyc*8qDz^bSiDE;WU~m5G+aV{liv?gm@C5gG{Q622UudBn zu9r~pIGhM-sXW?$iWi#Vj-noyvU)llftMiz#K-V{LN!;n)Rk1+&mU2a2OV;_q-3`ij|DEG=l%H|JwxW4X4u;bL3s9`xm?e@Adm zR7j>Qo3b|o04RtD1^p&}Ot$t4MsnC7G2UBiW%y->V5PTswrYyW4iAU;H^bDrv9)>W z+$3P=yU`8!S=dXe5#IkjX5rj}cIw43_>WhAMT_oVdE$Wqif$I3Ayoq$M#lH}E3%z7 zM=rV0#eb-v_?t)b-G@K*R?~P^Sx$TCyV*u70RVSzD?V1zp*F0=Ka-tgLyS7HL(yC2 z8=&)yt7}gyMr@zivN5$85di`XIQ*_qQY+-4fj^5qO%@#|Zq6r_gp1^#YidKs*40%s z6)Txm-`0P;eM@wZZLuExTlgRe3o#+hHoLP;+KZdf(jw^N058 z=}dEIf4QK_3aoFpUVK6Un{5*Ha;PtmH~Xqifdl|`&;9C7Jg*w=5{g|q?n(>M1r+bw 
z@@Htya~PXTY=nMgQUvCn7%{)%K2^L%BhIP3|7KVAoq6|LP>(#Op$n4-kS#Re0& znbua&dRm*b*dZ%@R|T6~(D+jz)Bp#;KsRBseF(h`pf^%m2wh1@;ZZQ_v-;e~%@`ZF zeOZt*pwOlCtYUi>m33dKbO6p{mDuLXWiXP+yxv^BwVZBF_wL5qGxr^FbOU{6+PQS>nI6sto=utbB^=GOR6h4AgwZ#ny=@~ix>hx zKK7f~8#5Y2oW|iIy?x>-zAzK#-D0|fH3YI&Hc3|ZR&&*p^<3=4?Hl3^n)$BF~0 zgnB(3=3{YdnW4Fq@ET)1 zO=%qW(`or#gJ2^h^H940>l3TGc#=u*lpX|=fSyn2-`0%#?p7n-W z{atLM_p&{5U=iAX=sqvZJcEVoJM65D-jC0uZ~!0-^m7R3>4b-;4sVf++wvW~R?Z57 z178?`tO!cDss4IfusRbPQv7_c_YQ@lsy`Ywf9QA~lZ9Ag^a;1T9Z&*Vdq{2cQF>IU zrhuRK-f?xa$6x4(UiC-WITN8rj)BJ>uOsOCw0vkTBOiU_R-A_OD?$1ujUV*h ziRHZ&lin@wQ+IL<2p5y;-G^KAXSRcqrw=Z4Mu(+|j9} zUpw=&FlqbJD86->FhK|SYY)~?5zAdNw>ak!(L0pzxjlC#*$p1Y<#t!v9fP>KO%si#}Yh|_@A+VxTVTHci&*!lYH9`yUdtFQbs|$Un>+)()h&%S4Uym;4p{4N^&+fo* z#J$%G_dAR$JFqea-5&bfx?b+Ahr2%Bn^A5&fN;Rwni^~agGA;Ui)Y~l_uyz_8TCv2 z^SmD3?v3D+_&EA`r1&)$Bla!D*{rusjQuYSLo+a-z+Ib-fTlC$5mo}#b74ZVK#o^MBbS7XY;kXJOrqKJUkNWno)_R$6c7rjIOxa#jkw9 zklVo}>)*#+D=DMp7#j_fiqV7ZzG=X>%G2URhRPb1q(zXfi5aK97xet6FjL(aCPu5|oW>o$t$fp6-Orl9B~y@R>Vd3OQISr1$0%_y zBsY`JB`+VL;+9Hw3e_6xM_Nt1tYL@1@Cwhb)*iE-*#JQ7`VKq0Cu*}?Bxc{owF6u2 z?xGk6WO(D6_tSFPwvD4oel(P_A~?is0IhRq{V77+>etSZTSi*PLtev9heqG1`U?T|7ysLd8d|rlKR4u1GhymVT_w zIx_TdPvxQS zJ!>tLH-A1(nByfsxpOPKv!$sQCy_OuXN#- zWIBC2nx1Ea0Ngiu(IB04|B!`vV-qSlhQGW7JDlchTgh44>8vz=Wq*B`4VEpGqWRzM zSuEWNyRp->Ky&D$P@2evw($R@9Zd%DCnftppESQsWvhSP>KDVBsBnvz%h3T>KFgzv z?d1NIv}(LjfIre$h~OH|<`4I2wZ;=b^Q`CmCWg1L<_Kb{wGRq#%C^K=DV#T?i}3KV z;Ggglh^ND_6qy$woS|9&bx)Gehkeb#}SsBN`%9Y>jEG9i1;~p56COp z#QCgVonBe3U;<)g;^_zQd{q5HOJI_w9Gz78A(-(`W|;-ix0gZp7=s?)(!D8BE&TLi#l-%`6=1) z9VuC@3g6Dxj=KS*S5MF(JIYl%F|?Az^%U(tt`o`F20!P-)m_hp`#Ve>`iWidb9#az zI5+tIj%ts=xzEHwbaZnd?k7Urue|xShdK4?+?XE6^yvXw+ee*`wh1%$#GEDT( z9|J9dLGs9!o;)LTku9V`kNnlA0Y{L)cyh8(j;VjA22ZF6To6?_;k7 zLhUcVo!iZ?gxewq``G z%bL&ruBvsNzq3IOXo$_wxb`t;>i@}VW$!~ep>b;U<5TDO2J5WXSxLE`r7<@T{tVKS zAhd@jvP;P9^aCGY7q0rm+}XK}#hsk+xw`Knn0fD+xwrq)XN4NjJ3e_e@oP7KZNT*C z!r1@ja{BNT(<&nNHZf>d!aQb(qF1HWsFdPb4YaT5tQvD|pbUgZzsZb*Fw8ilD18(9 
zV>W`GTdvFJ%X?(45F`A3_pl3>cb-|OR&!tS>(e{dSi7Za)H1%Qbujse{^R#P6_gID z(~167gBi>!b8f_%OSfIicDZ}o;DHAbDYeKYxw;-x9HRogv#FmTIhPJarkS`GsS~K26Nz5JL?w6ClmmnN586Sc3 zoFPb^7l=1d))5nWnO)`5bEb$jR2_aj#n0^yQH;r44{q2M)goMne@M%u4;VvV7RR)v zAUDRC{{DQFt=7ruWz(pX`BGRqka}h4k}=#5yp0e90BCr(P`VYbbpu{5aAOP8En@o~ zq5rjQW!x^mMtuw`xPko8<#RoLq=_)NFSz9cw~OFiiD-PpWc*8S98ez*T*ixOdFnGP zw(}wHS*E-W$I-2Xra|1Air%XiNtc;t4nv{F7F0XP!M@@!p&&6%+I;tEzm{B^SKG#4 zYn|jg9<1|4k6=WVmxECe(OUcyZuIZzP2UP6bbS?xYfYdY1(f5z0k&xUdiSQuUwt$|&K)(>pfsFH z489jd#%N)R`4luD`v^Y7YOK2fX}wCL{AznVJFe%^pgY~eg9K(Y9>wHrfy@;YOe-LL z@4eiP?{aD&|GuKnj}el-c8{iBR@DiHyK8!IWO^HYpT@e1J=N8h%+Yx6sFVMl#0RHH zha!h|6v?>UZPC-NIa9ukW#vi8iIQt%Djdza=g8uMsrMSc_{X{=?iFbW8qz}?Kt&;qC)#$s7qtC9a%&5*gLJvu2-%5N0)6*)_Czrli9 zMgM5{Lt7oCt8=xprrudlsCcVRiSoCSAVlX=p_mD^4lWSoq-UonOgzoI;PQqN0aO zP7b-1TOJ+xS-Wd)WG-t{NEQ`E1j-T0kgJ~;wtBENRR{W2$MsW94 z+*;SunEDx_JYe-pqV?rMRttq2^b-G9X?Ok(_4Wq<{DX?f5<_IGi()L1oh;q5T?*L^ zH%11bVa#N?j6s%=WE*2&LWXQ%gt25RvJApBmO;qAGYDVzyFB*~`2O;K&hviW=l65o z@8_HsrT0WJpwaIKV&L&ez&7GwtZ+D{19R>LK#^cOpAz@s050LjRB z=6+hg-IKupv|O?pzJx#-`etw{jqa86X*D}8zQn(_Y-#tBLDr ztG7xp1hePVWc)_Omc+gJyDEB-$!bT=J_Pdkgd?wcPVQ11BvOw{B+vSE>+(HjumvD6 zJfX_E{}ZQ!!7(nz+G7bi6YU{42Rmzry3I8~q~!|3JkC#7neY}N1GG$?Zvd51J3mgM zV1}z%5yjKY@|7a-B(TJ+L)sZHyvVtqnV@S|6%*$aalwvoy3@5`JRa0X#abn^T$%j% ztR_pnZ(rTT?L<3@@xoo)8VMizrT9b<=j1Q@3zO;;$Y(=W5IyQj{O%pYoUBQEYRM6j zL-JYX8$P+*!7yM$T(6lp#eTg;pOcDFlEw&{YRvwL^PlvuT|ApCZW^5b(I9%3rvx7L^Y zTW;qYoCCgZsS7LbL`I`Y;8IiO{d&>ae)LGv__L>oC&mjJ4pQ`P4ss#`OdMwlJ(H{G zfPM}j{&zHef-O4xdUwxCF2~n=Nq6WFSr+`LC+?f^hh%mQu?ut<%s}c{ho4HRn0W7q zffBWP`1ev>hgOI&f@*^>^+vHA000fYb&N_7-yS^&gPp z%RnffPLPc^NQy~!Ex=}JOk}jNd<5ZBu={e#J(jO~WvYZ-7BIktF&dLkf++g8ca6Fa z^2GL-uLjlxs_@ah-{KjadWJ_uKJ+qqXP^KuB_Z3rW^qrK`C1>D6^YyTJ6C8|)_GC9 zjM~e!1b^*CtZmn14Wvj&^cLMuSo0L@Q?trR7d>uKJ&IaizG_jm#_*nQ0JUOM4`8(?0g#^6H9;8@yg2Z;NJ9;~!T>4$6fmkStnW1-dN zV3(#XBZl2RNc8T}AsW8I>Sa!Fn|?T?e~C{?#_z*7Q`fp=MrfamC)oIQB!jn1#|C8$ 
zZMU8;cSz-c!0CVuUhV?;e#%u33i+A?p3uVuhq-^DzCs4#4TI~3-XyI|eWSgJ>fXue7uhea9=tvRPoC7a5+0p|@Ps>5b zn+a3I*}Ob|zc+{|3>owF2no$x%bo9VDXK2m-8+5n9qFg^I~kh3T1qj(isDY0&PWQrn|O=!nj8ce=hSNFF~(vr(?g}vjNV;QYhd3-CuQ%w{% z2kjvxRLEBnN;jVCJ>XCuNS@a@wY}2Oj~5u{CX`seL-%f$;flPHIz*>xlxrC=kGxrR zUFt%k=XXx$7&h!T(~(JUwV%dR#6bDCVwOZt;~IxQrS~t z=c2wQe{E*v(QNMhHf#SI*I&;0AGI%|Uc|T`r5YL>L3DtDq1254OXHf5BL%FS6S6}5 za`(}ct50WGU6FiQy_YDXv#y6+gj#=jw1*%?bRzTXtr+fc%3k(kFIp8@zIJSIhB`TV z7+1RmDl9EL-w-pG9k-~1tdxUcpZ(nKbo+yGgNN+QLO9;(?FAae9G#S$Iymo5=e+}u zw$^H1Xy2Erj1bI4{~TRxR1h(@^RkLnG^Tkg$tSRd7qChvG!a zOG<2pp&M%L2}oZF>GGO**QnsRDTDHCM*gt6WL4hlA00LTAaV~k1Lqg`>w%$fjggAJ z>}X~4@m*uLUWWxkC$jyJco%PTzMZ)#RRscy`ebUE*jNo~wEzH9>;uknuE_?`X*#u6 zOpA*)7S_bhxUjNG{=X)vrLxk+ve9egP9>5Q4+GECDyI*S>zEAtso{YP zc`$<1pL{cU#TIQ}Tj?SFmS>v-Nfz(=DNoN+n7OI}->cFOB;t+) z^1k0%zJ?(?%UJSi@jA(MgbhDmzEI8d6(U}zS&0sCNX)B}F|mgpts}ahnhUt^Se#x% zZfnNib+A*`b*%%V19H?V!?FyRtzcgyg{)~Am1SF_xg&A?`bp<*ztHYB`ceJkCf6_6 z)1edZp^E@5J>x;}ZkvlX$D|he$B3{+ls8Q4-B>}-sB0XLz^6rzt5m9a_N&m zky>1mfo5;-*!!IRQu6cBoAHjRD2#%9`SQ>%tgG`kiKmJt`VNX~O9|8NsBTHxWujlS zEi2P`t2E_&D4)dL1ymcyV*vwR4T!wnmn3)-(IPPQ&ClcTuFSo|#)d8`d_WU0%z( zQk5U&Hm?+kpOAC{Sq-1F7_*YCFJk__j?01LIe*rpD>Y)Gt diff --git a/Documentation/docs/contributing/data.md b/Documentation/docs/contributing/data.md index 81eb8dc4eda..56f7716d9b6 100644 --- a/Documentation/docs/contributing/data.md +++ b/Documentation/docs/contributing/data.md @@ -11,7 +11,8 @@ generation) also applies to any other data contained in a text file that a test may require, if any. If you just want to browse and download the ITK testing images, browse the -[ITKData Datalad repository]. +[ITKTestingData repository]. Historical snapshots are also archived in +the [ITKData DataLad repository]. Setup ----- @@ -43,7 +44,29 @@ associated with these files. Generate the *.cid* content link from your test data file, *MyTest.png* in this example, with the [content-link-upload] web app. 
This app will upload the data to IPFS and provide a *.cid* CMake ExternalData content link file -to download. +to download. This is the easiest and recommended way to upload new test data. + +For advanced command line driven uploads, the upload script at +`Utilities/Maintenance/ExternalDataUpload/` can be used: + +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ + Modules/.../test/Baseline/MyTest.png +``` + +The script adds the file to your local IPFS node under the UnixFS v1 2025 +profile, pins it on the `itk-pinata` and `itk-filebase` remote pinning +services, and replaces the original file with `MyTest.png.cid` containing the +resulting CID. The CID and source-tree path are also recorded in +`Testing/Data/content-links.manifest`. + +For advanced CLI usage, first-time users must complete the one-time Kubo + pinning-service setup +documented in +[`Utilities/Maintenance/ExternalDataUpload/README.md`] before the script will +succeed. Contributors who cannot run a local Kubo daemon may instead use +the [content-link-upload] web app, which pins to [Filebase] and [Pinata] and returns +a `.cid` file directly — manifest and mirror updates must then be added by +hand. For more details, see the description and procedures in [Upload Binary Data]. @@ -142,5 +165,9 @@ the [InterPlanetary File System (IPFS)]. 
[CMake ExternalData: Using Large Files with Distributed Version Control]: https://blog.kitware.com/cmake-externaldata-using-large-files-with-distributed-version-control/ [content-link-upload]: https://content-link-upload.itk.org [InterPlanetary File System (IPFS)]: https://ipfs.tech/ -[ITKData Datalad repository]: https://gin.g-node.org/InsightSoftwareConsortium/ITKData/src/main +[ITKData DataLad repository]: https://gin.g-node.org/InsightSoftwareConsortium/ITKData/src/main +[ITKTestingData repository]: https://github.com/InsightSoftwareConsortium/ITKTestingData [Upload Binary Data]: upload_binary_data.md +[`Utilities/Maintenance/ExternalDataUpload/README.md`]: https://github.com/InsightSoftwareConsortium/ITK/blob/main/Utilities/Maintenance/ExternalDataUpload/README.md +[Filebase]: https://filebase.com/ +[Pinata]: https://pinata.cloud/ diff --git a/Documentation/docs/contributing/upload_binary_data.md b/Documentation/docs/contributing/upload_binary_data.md index 905bac11932..b94e1655b29 100644 --- a/Documentation/docs/contributing/upload_binary_data.md +++ b/Documentation/docs/contributing/upload_binary_data.md @@ -34,34 +34,47 @@ adopting Web3, we gain: - **Sustainability** Contributors to the ITK upload their data through a simple web app -that utilizes an easy-to-use, permissionless, free service, [web3.storage]. +that utilizes an easy-to-use, permissionless service, [Pinata]. Data used in the ITK Git repository is periodically tracked in a dedicated DataLad repository, the [ITKData DataLad repository]. 
and stored across redundant locations so it can be retrieved from any of the following: -- Local [IPFS](https://ipfs.io/) nodes -- Peer [IPFS](https://ipfs.io/) nodes -- [web3.storage](https://web3.storage/) -- [pinata.cloud](https://pinata.cloud) -- Kitware's IPFS Server -- [ITKTestingData](https://github.com/InsightSoftwareConsortium/ITKTestingData) GitHub Pages CDN +Contributors upload their data by running a small shell script that pushes +the file into [IPFS] via a local [Kubo] daemon, pins it on redundant +community-run pinning services, records the resulting CID in a manifest, and +(optionally) mirrors the bytes into the [ITKTestingData] GitHub Pages +repository. See [`Utilities/Maintenance/ExternalDataUpload/README.md`] for +the one-time developer setup and full workflow. + +Data referenced from the ITK Git repository is stored across redundant +locations so it can be retrieved from any of the following at build time: + +- Local [Kubo] gateway (typically `127.0.0.1:8080`) +- [ITKTestingData] GitHub Pages mirror +- [Pinata] (community pinning service, remote name `itk-pinata`) +- [Filebase] (community pinning service, remote name `itk-filebase`) +- Public IPFS HTTP gateways (`ipfs.io`, `dweb.link`, `cloudflare-ipfs.com`) - Kitware's Apache HTTP Server -- Local testing data cache +- Local `ExternalData_OBJECT_STORES` cache - Archive tarballs from GitHub Releases +- Historical [ITKData DataLad repository] snapshots (older content links) ![ITK testing data figure](./itk-testing-data.png) -*Testing data workflow. Testing or example data is uploaded to IPFS via the content-link-upload.itk.org web app. -This pins the data on multiple servers across the globe. -At release time, the data is also pinned on multiple servers in the USA and France and community pinners. -At release time, the data is also stored in the DataLad Git repository, served on an Apache HTTP server, and the GitHub Pages CDN. 
-At test time an ITK build can pull the data from a local cache, archive tarball, the Apache HTTP server, GitHub Pages CDN, or multiple IPFS HTTP gateways.* +*Testing data workflow. Testing or example data is uploaded to IPFS via the +content-link-upload.itk.org web app. New content is added with the +`Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh` script, which pushes +the bytes to a local [Kubo] node and pins them on `itk-pinata` and +`itk-filebase` for redundancy. The resulting CID is written as a `.cid` +content link in the ITK source tree and recorded in +`Testing/Data/content-links.manifest`. Files ≤ 50 MB can additionally be +mirrored into [ITKTestingData] for GitHub Pages CDN delivery. At test time an +ITK build can fetch the data from a local cache, archive tarball, the Apache +HTTP server, the GitHub Pages mirror, or any of several IPFS HTTP gateways.* -See also our [Data](data.md) guide for more information. If you just -want to browse and download the ITK testing images, see the -[ITKData DataLad repository]. +See also our [Data](data.md) guide for more information. Adding images as input to ITK sources ------------------------------------- @@ -89,88 +102,112 @@ need to be followed: Upload new testing data ----------------------- -### Prerequisites +### Web app -[web3.storage] is a decentralized IPFS storage -provider where any ITK community member can upload binary data files. -There are two primary methods available to upload data files: +The easiest, recommended way to upload data is The [Content Link Upload] browser interface. -A. The CMake ExternalData Web3 upload browser interface. -B. The w3 command line executable that - comes with the [@web3-storage/w3cli] Node.js NPM package. +### CLI one-time setup -Once files have been uploaded, they will be publicly -available and accessible since data is content addressed on the IPFS -peer-to-peer network. 
+The upload workflow requires: -In addition to these two methods, documented in detail below, another -possibility includes pinning the data on IPFS with [other pinning services] -and creating the content link file manually. The content link file is simply a -plan text file with a `.cid` extension whose contents are the CID file. -However, the documented two methods are recommended due to their simplicity -and in order to keep CID values consistent. +- A local [Kubo] daemon (CLI or IPFS Desktop) with the **UnixFS v1 2025** + profile applied, so CIDs are reproducible across implementations + (`ipfs config profile apply unixfs-v1-2025`, Kubo ≥ 0.40.0). +- Two remote pinning services configured under the exact names + `itk-pinata` and `itk-filebase`. The upload script looks up these names + and fails if they are missing. -At release time, the release manager uploads and archives repository data -references in other storage locations for additional redundancy. +The full step-by-step setup — installing Kubo, signing up with +[Pinata] and [Filebase], and registering each service as a remote — +is documented in +[`Utilities/Maintenance/ExternalDataUpload/README.md`]. Complete that +one-time setup before proceeding. -### Option A) Upload Via the Web Interface +### Upload a file -Use the [Content Link Upload] -tool ([Alt Link]) to -upload your data to the [IPFS] and download the -corresponding CMake content link file. +From the ITK source tree, run the upload script with the path to the file +you want to upload: -![[CMake ExternalData Web3 -Content Link Upload](https://content-link-upload.itk.org/)](./content-link-upload.png) +```bash +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ + Modules/.../test/Baseline/MyTest.png +``` -### Option B) Upload Via CMake and Node.js CLI +The script will: -Install the w3 CLI with the -[@web3-storage/w3cli] [Node.js] package: +1. 
Add the file to IPFS with `--cid-version=1` under the UnixFS v1 2025 + profile, producing a deterministic CID. +2. Pin locally, then on `itk-pinata` and `itk-filebase`. By default the + script waits until each remote reports `pinned`, which surfaces + failures immediately but can take minutes per file as the remote + fetches the content. For batch runs pass `--background` to submit + pins asynchronously and verify afterwards with + `ipfs pin remote ls --status=queued,pinning,pinned`. +3. Replace `MyTest.png` in the source tree with `MyTest.png.cid` — a + one-line text file containing the CID. +4. Append the CID and source-tree path to + `Testing/Data/content-links.manifest`. +5. Print the `git rm` / `git add` commands needed to stage the change. -```bash -npm install -g @web3-storage/w3cli -``` +### Mirror to ITKTestingData (optional but recommended) -Login in with your credentials. +Pass `--testing-data-repo ` to additionally copy the file into a +local clone of [ITKTestingData] at `CID/`: ```bash -w3 login +Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ + --testing-data-repo ~/src/ITKTestingData \ + Modules/.../test/Baseline/MyTest.png ``` -Create an w3externaldata bash/zsh -function: +This populates the GitHub Pages mirror gateway +(`https://insightsoftwareconsortium.github.io/ITKTestingData/CID/`) +already listed in [`CMake/ITKExternalData.cmake`]. Commit and push in +the `ITKTestingData` repo to publish. Files larger than **50 MB** are +skipped for the mirror step only (GitHub rejects pushes containing +files over 50 MB per file) — IPFS pinning on `itk-pinata` and +`itk-filebase` still proceeds for those files. -```bash -function w3externaldata() { w3 put $1 --no-wrap | tail -n 1 | awk -F "/ipfs/" '{print $2}' | tee $1.cid } -``` +### Alternative: upload via the web app + +Contributors who prefer not to run a local [Kubo] daemon can upload a file +through the [Content Link Upload] web app ([Alt Link]). 
The app pins the +file on [web3.storage] and returns the corresponding `.cid` content link +to download. The resulting CID is usable anywhere the script-produced CID +would be — but the manifest entry and the optional [ITKTestingData] +mirror must then be added by hand. The script-based workflow above is +preferred when available because it also updates +`Testing/Data/content-links.manifest` and pins on the ITK community +services in one step. + +### Normalize existing content links -Call the function with the file to be uploaded. This command will -generate the \.cid content -link: +Older `.md5` / `.sha256` / `.sha512` content links can be converted to +`.cid`, and existing `.cid` links can be regenerated under the UnixFS +v1 2025 profile, with: ```bash -w3externaldata - 1 file (0.3MB) -⁂ Stored 1 file -bafkreifpfhcc3gc7zo2ds3ktyyl5qrycwisyaolegp47cl27i4swxpa2ey +Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh ``` -### Add the content link to the source tree +See [`Utilities/Maintenance/ExternalDataUpload/README.md`] for the full +set of options (`--dry-run`, `--hash-only`, `--cid-only`, `--background`, +`--testing-data-repo`). -Add the file to the repository in the directory referenced by the -*CMakeLists.txt* script. Move the content link file to the **source tree** at -the location where the actual file is desired in the build tree. +### Add the content link to the source tree -Stage the new file to your commit: +The upload script prints the exact commands to stage: ```bash -git add -- path/to/file.cid +git add path/to/MyTest.png.cid +git add Testing/Data/content-links.manifest +git commit ``` -Next time CMake configuration runs, it will find the new content link. During -the next project build, the data file corresponding to the content link will -be downloaded into the build tree. +Next time CMake configuration runs, it will find the new content link. 
+During the next project build, the data file corresponding to the +content link will be downloaded into the build tree from the first +reachable gateway in [`CMake/ITKExternalData.cmake`]. [Alt Link]: https://content-link-upload.itk.eth.limo [Analyze format]: http://www.grahamwideman.com/gw/brain/analyze/formatdoc.htm @@ -178,18 +215,17 @@ be downloaded into the build tree. [Content Link Upload]: https://content-link-upload.itk.org [CONTRIBUTING.md]: ../CONTRIBUTING.md [CMake]: https://cmake.org/ +[`CMake/ITKExternalData.cmake`]: https://github.com/InsightSoftwareConsortium/ITK/blob/main/CMake/ITKExternalData.cmake +[Filebase]: https://filebase.com/ [Git]: https://git-scm.com/ [IPFS]: https://ipfs.io/ -[ITKData Datalad repository]: https://gin.g-node.org/InsightSoftwareConsortium/ITKData/src/main [ITK community]: https://discourse.itk.org/ [ITK Sphinx Examples]: https://itk.org/ITKExamples/index.html [ITK Software Guide]: https://itk.org/ItkSoftwareGuide.pdf +[ITKData DataLad repository]: https://gin.g-node.org/InsightSoftwareConsortium/ITKData/src/main [ITKTestingData]: https://github.com/InsightSoftwareConsortium/ITKTestingData -[MD5 hash]: https://en.wikipedia.org/wiki/MD5 +[Kubo]: https://github.com/ipfs/kubo [multiformats]: https://multiformats.io/ -[Node.js]: https://nodejs.org/ -[other pinning services]: https://docs.ipfs.tech/how-to/work-with-pinning-services/ -[SHA512 hash]: https://en.wikipedia.org/wiki/SHA-2 +[Pinata]: https://pinata.cloud/ [solution to this problem]: https://blog.kitware.com/cmake-externaldata-using-large-files-with-distributed-version-control/ -[web3.storage]: https://web3.storage/ -[@web3-storage/w3cli]: https://www.npmjs.com/package/@web3-storage/w3cli +[`Utilities/Maintenance/ExternalDataUpload/README.md`]: https://github.com/InsightSoftwareConsortium/ITK/blob/main/Utilities/Maintenance/ExternalDataUpload/README.md From 0e9dd0b685970e859c1198aedcbef27c2b5d3edd Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Thu, 30 Apr 2026 
14:40:13 -0400 Subject: [PATCH 6/7] ENH: Make itk-pinata pinning service optional MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Pinata's `pin remote add` endpoint (the IPFS Pinning Service API) is gated to paid plans — the free plan rejects pin-by-CID with PAID_FEATURE_ONLY (HTTP 403), as reported by @hjmjohnson while exercising the new ExternalDataUpload skill. Filebase's free tier still accepts PSA pin-by-CID, so it remains the baseline pin provider for contributors who don't have a paid Pinata account. ipfs-upload.sh now splits its remote-pinning configuration into a required list (`itk-filebase`) and an optional list (`itk-pinata`): the script aborts if Filebase isn't registered, but logs an informational notice and continues if Pinata isn't. The remote-pin loop walks the merged ACTIVE_SERVICES list so content is still pinned to Pinata whenever it is configured. The reorder also surfaces Filebase first in every user-facing list (storage locations, log lines, manifest-skipped warnings, README setup section, contributor docs) to match the new "required first, optional second" hierarchy. Documentation in README.md, SKILL.md, Documentation/docs/contributing/ upload_binary_data.md, and Documentation/docs/contributing/data.md is updated to reorder Filebase ahead of Pinata, mark Pinata as optional, and explain the paid-plan restriction. README.md gains a troubleshooting entry for the PAID_FEATURE_ONLY error pointing at `ipfs pin remote service rm itk-pinata` as the cleanest fix when no paid plan is available. 
Agent-Session-Id: 40f8eba4-dc94-4d4f-94bd-ff3d2fccf04f Co-Authored-By: Claude Opus 4.7 --- Documentation/docs/contributing/data.md | 8 +- .../docs/contributing/upload_binary_data.md | 49 +++++---- .../Maintenance/ExternalDataUpload/README.md | 101 +++++++++++------- .../Maintenance/ExternalDataUpload/SKILL.md | 19 +++- .../content-link-normalize.sh | 5 +- .../ExternalDataUpload/ipfs-pin-all.sh | 4 +- .../ExternalDataUpload/ipfs-upload.sh | 41 +++++-- 7 files changed, 149 insertions(+), 78 deletions(-) diff --git a/Documentation/docs/contributing/data.md b/Documentation/docs/contributing/data.md index 56f7716d9b6..b1b2c9d3aa8 100644 --- a/Documentation/docs/contributing/data.md +++ b/Documentation/docs/contributing/data.md @@ -55,9 +55,11 @@ Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ ``` The script adds the file to your local IPFS node under the UnixFS v1 2025 -profile, pins it on the `itk-pinata` and `itk-filebase` remote pinning -services, and replaces the original file with `MyTest.png.cid` containing the -resulting CID. The CID and source-tree path are also recorded in +profile, pins it on the `itk-filebase` remote pinning service (and on +`itk-pinata` when that service is configured — Pinata is optional because +its pin-by-CID endpoint requires a paid plan), and replaces the original +file with `MyTest.png.cid` containing the resulting CID. The CID and +source-tree path are also recorded in `Testing/Data/content-links.manifest`. 
For advanced CLI usage, first-time users must complete the one-time Kubo + pinning-service setup diff --git a/Documentation/docs/contributing/upload_binary_data.md b/Documentation/docs/contributing/upload_binary_data.md index b94e1655b29..9e3f7a46be2 100644 --- a/Documentation/docs/contributing/upload_binary_data.md +++ b/Documentation/docs/contributing/upload_binary_data.md @@ -53,8 +53,9 @@ locations so it can be retrieved from any of the following at build time: - Local [Kubo] gateway (typically `127.0.0.1:8080`) - [ITKTestingData] GitHub Pages mirror -- [Pinata] (community pinning service, remote name `itk-pinata`) - [Filebase] (community pinning service, remote name `itk-filebase`) +- [Pinata] (community pinning service, remote name `itk-pinata`, + *optional* — pin-by-CID requires a paid Pinata plan) - Public IPFS HTTP gateways (`ipfs.io`, `dweb.link`, `cloudflare-ipfs.com`) - Kitware's Apache HTTP Server - Local `ExternalData_OBJECT_STORES` cache @@ -65,14 +66,15 @@ locations so it can be retrieved from any of the following at build time: *Testing data workflow. Testing or example data is uploaded to IPFS via the content-link-upload.itk.org web app. New content is added with the -`Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh` script, which pushes -the bytes to a local [Kubo] node and pins them on `itk-pinata` and -`itk-filebase` for redundancy. The resulting CID is written as a `.cid` -content link in the ITK source tree and recorded in -`Testing/Data/content-links.manifest`. Files ≤ 50 MB can additionally be -mirrored into [ITKTestingData] for GitHub Pages CDN delivery. 
At test time an -ITK build can fetch the data from a local cache, archive tarball, the Apache -HTTP server, the GitHub Pages mirror, or any of several IPFS HTTP gateways.* +`Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh` script, which +pushes the bytes to a local [Kubo] node and pins them on `itk-filebase` +(and `itk-pinata` if a paid Pinata plan is configured) for redundancy. The +resulting CID is written as a `.cid` content link in the ITK source tree +and recorded in `Testing/Data/content-links.manifest`. Files ≤ 50 MB can +additionally be mirrored into [ITKTestingData] for GitHub Pages CDN +delivery. At test time an ITK build can fetch the data from a local cache, +archive tarball, the Apache HTTP server, the GitHub Pages mirror, or any +of several IPFS HTTP gateways.* See also our [Data](data.md) guide for more information. @@ -113,13 +115,17 @@ The upload workflow requires: - A local [Kubo] daemon (CLI or IPFS Desktop) with the **UnixFS v1 2025** profile applied, so CIDs are reproducible across implementations (`ipfs config profile apply unixfs-v1-2025`, Kubo ≥ 0.40.0). -- Two remote pinning services configured under the exact names - `itk-pinata` and `itk-filebase`. The upload script looks up these names - and fails if they are missing. +- The [Filebase] remote pinning service registered as `itk-filebase`. This + is **required** — it works on Filebase's free tier. +- Optionally, the [Pinata] remote pinning service registered as + `itk-pinata`. Pinata's pin-by-CID endpoint requires a **paid plan** + (the free plan rejects PSA `pin remote add` with `PAID_FEATURE_ONLY`), + so configure this only if you have a paid Pinata account; the upload + script skips it with a notice when it isn't registered. 
The full step-by-step setup — installing Kubo, signing up with -[Pinata] and [Filebase], and registering each service as a remote — -is documented in +[Filebase] (and optionally [Pinata]), and registering each service +as a remote — is documented in [`Utilities/Maintenance/ExternalDataUpload/README.md`]. Complete that one-time setup before proceeding. @@ -137,11 +143,12 @@ The script will: 1. Add the file to IPFS with `--cid-version=1` under the UnixFS v1 2025 profile, producing a deterministic CID. -2. Pin locally, then on `itk-pinata` and `itk-filebase`. By default the - script waits until each remote reports `pinned`, which surfaces - failures immediately but can take minutes per file as the remote - fetches the content. For batch runs pass `--background` to submit - pins asynchronously and verify afterwards with +2. Pin locally, then on `itk-filebase` (and on `itk-pinata` if it is + registered — otherwise the script logs a notice and continues). By + default the script waits until each remote reports `pinned`, which + surfaces failures immediately but can take minutes per file as the + remote fetches the content. For batch runs pass `--background` to + submit pins asynchronously and verify afterwards with `ipfs pin remote ls --status=queued,pinning,pinned`. 3. Replace `MyTest.png` in the source tree with `MyTest.png.cid` — a one-line text file containing the CID. @@ -165,8 +172,8 @@ This populates the GitHub Pages mirror gateway already listed in [`CMake/ITKExternalData.cmake`]. Commit and push in the `ITKTestingData` repo to publish. Files larger than **50 MB** are skipped for the mirror step only (GitHub rejects pushes containing -files over 50 MB per file) — IPFS pinning on `itk-pinata` and -`itk-filebase` still proceeds for those files. +files over 50 MB per file) — IPFS pinning on `itk-filebase` (and on +`itk-pinata` when configured) still proceeds for those files. 
### Alternative: upload via the web app diff --git a/Utilities/Maintenance/ExternalDataUpload/README.md b/Utilities/Maintenance/ExternalDataUpload/README.md index 33a01c28283..c7175fcf514 100644 --- a/Utilities/Maintenance/ExternalDataUpload/README.md +++ b/Utilities/Maintenance/ExternalDataUpload/README.md @@ -84,32 +84,16 @@ References: ### 3. Configure Remote Pinning Services -The upload script pins content on two remote services for redundancy, matching -the gateways declared in `CMake/ITKExternalData.cmake`. Both services must be -configured under the **exact names `itk-pinata` and `itk-filebase`** — the -upload script looks up those names and fails if they are missing. +The upload script pins content on community-run remote services for +redundancy alongside the GitHub Pages mirror, matching the gateways +declared in `CMake/ITKExternalData.cmake`. Use the **exact service names** +`itk-filebase` (required) and `itk-pinata` (optional) — the upload script +looks up those names. -#### Pinata (service name: `itk-pinata`) +#### Filebase (service name: `itk-filebase`, **required**) -1. Sign up at -2. Create an API key at - - Enable **pinByHash** and **pinFileToIPFS** permissions -3. Copy the JWT token and add the service (use a prompt to avoid leaking - the token into shell history): - -```bash -printf "Pinata JWT: " && read -rs PINATA_JWT && echo -ipfs pin remote service add itk-pinata https://api.pinata.cloud/psa "$PINATA_JWT" -``` - -4. Verify: - -```bash -ipfs pin remote service ls -# Should show: itk-pinata https://api.pinata.cloud/psa -``` - -#### Filebase (service name: `itk-filebase`) +Filebase's IPFS Pinning Service endpoint accepts pin-by-CID on the free +tier, so this is the baseline pinning provider for ITK. 1. Sign up at 2. 
Create an **IPFS bucket** at @@ -129,6 +113,34 @@ ipfs pin remote service ls # Should show: itk-filebase https://api.filebase.io/v1/ipfs ``` +#### Pinata (service name: `itk-pinata`, **optional — paid plan**) + +Pinata's `pin remote add` endpoint (the IPFS Pinning Service API) is +restricted to **paid plans** — the free plan rejects pin-by-CID with +`PAID_FEATURE_ONLY` (HTTP 403). Configure this service only if you have a +paid Pinata plan; otherwise leave it out and the upload script will skip +it with an informational message. Filebase + the GitHub Pages mirror still +provide redundancy. + +1. Sign up at and select a paid plan that includes + pin-by-CID +2. Create an API key at + - Enable **pinByHash** and **pinFileToIPFS** permissions +3. Copy the JWT token and add the service (use a prompt to avoid leaking + the token into shell history): + +```bash +printf "Pinata JWT: " && read -rs PINATA_JWT && echo +ipfs pin remote service add itk-pinata https://api.pinata.cloud/psa "$PINATA_JWT" +``` + +4. Verify: + +```bash +ipfs pin remote service ls +# Should show: itk-pinata https://api.pinata.cloud/psa +``` + ## Usage ### Upload a single file @@ -141,7 +153,8 @@ The script will: 1. Add the file to IPFS with `--cid-version=1` (UnixFS v1 2025 profile) 2. Pin it locally -3. Pin it on `itk-pinata` and `itk-filebase` +3. Pin it on `itk-filebase` (and on `itk-pinata` if that service is + configured; otherwise the script logs a notice and continues) 4. Replace the original file with `.cid` containing the CID 5. Append/update an entry in `Testing/Data/content-links.manifest` 6. Print the `git rm` / `git add` commands to stage the change @@ -164,9 +177,9 @@ Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ **GitHub 50 MB file size limit.** `ITKTestingData` is hosted on GitHub, which hard-rejects pushes containing files larger than **50 MB** per file. 
The upload script checks the file size before mirroring and refuses to copy files over -50 MB into the `ITKTestingData` tree. IPFS pinning (local + `itk-pinata` + -`itk-filebase`) still proceeds for oversized files — the mirror step is the -only one that gets skipped, with a clear warning. +50 MB into the `ITKTestingData` tree. IPFS pinning (local + `itk-filebase`, +plus `itk-pinata` when configured) still proceeds for oversized files — the +mirror step is the only one that gets skipped, with a clear warning. Commit the staged `CID/` file in `ITKTestingData` and push; the `gh-pages` workflow on that repo republishes the new file at the GitHub Pages @@ -212,10 +225,10 @@ The script will, for each content link under the given path: link is left untouched and reported. 3. Re-materialize the actual file next to the content link, then invoke `ipfs-upload.sh` on it so the new CID is produced under the UnixFS v1 2025 - profile, pinned locally and on `itk-pinata` / `itk-filebase`, and (if - `--testing-data-repo` is passed) mirrored into `ITKTestingData`. The old - `.md5` / `.sha256` / `.sha512` link is removed; a `.cid` link is written in - its place. + profile, pinned locally and on `itk-filebase` (and `itk-pinata` if + configured), and (if `--testing-data-repo` is passed) mirrored into + `ITKTestingData`. The old `.md5` / `.sha256` / `.sha512` link is removed; + a `.cid` link is written in its place. Common options: @@ -247,8 +260,9 @@ queues the pin and fetches the content itself, and the script returns right away. 
Check final pin state with: ```bash -ipfs pin remote ls --service=itk-pinata --status=queued,pinning,pinned ipfs pin remote ls --service=itk-filebase --status=queued,pinning,pinned +# Only when itk-pinata is configured (paid Pinata plan): +ipfs pin remote ls --service=itk-pinata --status=queued,pinning,pinned ``` Both scripts also pre-check each remote for an existing pin on the same @@ -321,13 +335,26 @@ Start the daemon: `ipfs daemon` in a separate terminal, or launch IPFS Desktop. The script tests the connection with `ipfs swarm peers`, which requires an active daemon. -### `Required pinning service 'itk-pinata' is not configured` +### `Required pinning service 'itk-filebase' is not configured` Run `ipfs pin remote service ls` to see configured services. Re-add with the commands in step 3 above. Tokens may have expired if you revoked the API key. -The script intentionally refuses to upload if either `itk-pinata` or -`itk-filebase` is missing: a single pin provider is not enough redundancy for -test data CI relies on. +The script intentionally refuses to upload if `itk-filebase` is missing — it +is the baseline pin provider that works on the free tier. `itk-pinata` is +optional (paid plan only); if it isn't registered the script prints a +notice and continues. + +### `PAID_FEATURE_ONLY` / `403 Forbidden` from Pinata + +Pinata's free plan no longer accepts pin-by-CID via the IPFS Pinning Service +API; their `pin remote add` endpoint is gated to paid plans. If you don't +have a paid plan, remove the service so the upload script skips it cleanly: + +```bash +ipfs pin remote service rm itk-pinata +``` + +Filebase + the GitHub Pages mirror still provide redundancy. 
### Remote pin failed diff --git a/Utilities/Maintenance/ExternalDataUpload/SKILL.md b/Utilities/Maintenance/ExternalDataUpload/SKILL.md index 77c1d8ed0b6..78d983ebf9e 100644 --- a/Utilities/Maintenance/ExternalDataUpload/SKILL.md +++ b/Utilities/Maintenance/ExternalDataUpload/SKILL.md @@ -2,8 +2,10 @@ name: external-data-upload description: > Upload ITK test data to IPFS and produce .cid content links, pin on - itk-pinata and itk-filebase, optionally mirror into ITKTestingData, and - normalize existing .md5 / .sha256 / .cid content links. Use when the + itk-filebase (and itk-pinata if configured — Pinata is optional because + pin-by-CID requires a paid plan there), optionally mirror into + ITKTestingData, and normalize existing .md5 / .sha256 / .cid content + links. Use when the user wants to add test images, baseline data, or model files under Testing/Data/ or a module's data/ directory, or when asked to convert hash-based content links to CID. @@ -28,8 +30,14 @@ Required: - IPFS daemon running (`ipfs daemon` or IPFS Desktop) - UnixFS v1 2025 profile applied (`ipfs config profile apply unixfs-v1-2025`) -- `itk-pinata` remote pinning service configured -- `itk-filebase` remote pinning service configured +- `itk-filebase` remote pinning service configured (works on the free tier) + +Optional: + +- `itk-pinata` remote pinning service configured. Pinata's pin-by-CID + endpoint is **paid-only** — the free plan rejects PSA `pin remote add` + with `PAID_FEATURE_ONLY` (403). The upload script skips this service + with a notice if it isn't registered. ## Tasks this skill handles @@ -53,7 +61,8 @@ Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ The script will: 1. Add to IPFS with `--cid-version=1` (UnixFS v1 2025 profile) -2. Pin locally, on `itk-pinata`, and on `itk-filebase` +2. Pin locally and on `itk-filebase`; also on `itk-pinata` if registered + (skipped with a notice when only Filebase is configured) 3. 
If `--testing-data-repo` given and file ≤ 50 MB, copy to `/CID/` and `git add` it there. Files over 50 MB are skipped for the mirror step only (GitHub rejects > 50 MB) — IPFS pinning still diff --git a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh index 0b3a3fbaa9a..6c49720b5b4 100755 --- a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +++ b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh @@ -10,8 +10,9 @@ set -euo pipefail # 2. Verifies the bytes against the declared hash or CID. # 3. Re-materialises the actual file alongside the link, then invokes # ipfs-upload.sh on it so a fresh CID is produced under the UnixFS -# v1 2025 profile, pinned on itk-pinata and itk-filebase, and -# (optionally) mirrored into ITKTestingData. +# v1 2025 profile, pinned on itk-filebase (and on itk-pinata if +# configured — see ipfs-upload.sh), and (optionally) mirrored +# into ITKTestingData. # # Usage: # content-link-normalize.sh [options] diff --git a/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh b/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh index 9286a4f83cd..add07fd3188 100755 --- a/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh +++ b/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh @@ -2,7 +2,9 @@ set -euo pipefail # Batch-pin every CID in Testing/Data/content-links.manifest locally and on -# every configured remote pinning service (itk-pinata, itk-filebase, ...). +# every configured remote pinning service (itk-filebase, plus itk-pinata or +# any other PSA-compatible remote if registered with `ipfs pin remote +# service add`). 
# # Usage: ipfs-pin-all.sh [--background] # diff --git a/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh b/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh index 621464be36c..fb1cb27ba2c 100755 --- a/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +++ b/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh @@ -2,9 +2,11 @@ set -euo pipefail # Upload a file to IPFS (CIDv1, UnixFS v1 2025 profile), pin it on the -# itk-pinata and itk-filebase remote pinning services, and replace the -# original with a .cid content link. Optionally mirror the bytes into a -# local ITKTestingData checkout at CID/. +# itk-filebase remote pinning service (and on itk-pinata if it is +# configured — Pinata is optional because pin-by-CID is a paid-plan +# feature there), and replace the original with a .cid content link. +# Optionally mirror the bytes into a local ITKTestingData checkout at +# CID/. # # Usage: # ipfs-upload.sh [--testing-data-repo ] [--background] @@ -30,15 +32,25 @@ set -euo pipefail # - Kubo (go-ipfs) installed and `ipfs` on PATH # - IPFS daemon running (ipfs daemon, or IPFS Desktop) # - UnixFS v1 2025 profile applied: `ipfs config profile apply unixfs-v1-2025` -# - `itk-pinata` and `itk-filebase` remote pinning services configured +# - `itk-filebase` remote pinning service configured (required) +# - `itk-pinata` remote pinning service configured (optional — Pinata's +# pin-by-CID endpoint is paid-only, so configure it only if you have +# a paid plan) # # See README.md in this directory for full setup. SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" -# Required remote pinning services — script errors if not configured. -REQUIRED_SERVICES=(itk-pinata itk-filebase) +# Required pinning service — Filebase's free tier supports pin-by-CID +# via the IPFS Pinning Service API, so we always need this one. 
+REQUIRED_SERVICES=(itk-filebase) + +# Optional pinning service — Pinata's pin-by-CID endpoint requires a paid +# plan (their free tier rejects PSA `pin remote add` with +# PAID_FEATURE_ONLY/403). Pinned to if configured, skipped with a notice +# if not. +OPTIONAL_SERVICES=(itk-pinata) # GitHub hard-rejects pushes containing any file > 50 MB. The ITKTestingData # mirror step is skipped for files over this limit. @@ -164,7 +176,9 @@ if ! ipfs swarm peers &>/dev/null; then exit 1 fi -# Check required remote pinning services are configured. +# Check required remote pinning services are configured. Optional services +# are recorded so they're attempted in addition to the required ones; a +# missing optional service is reported but does not abort the upload. CONFIGURED_SERVICES="$(ipfs pin remote service ls 2>/dev/null || true)" for svc in "${REQUIRED_SERVICES[@]}"; do if ! echo "$CONFIGURED_SERVICES" | grep -q "^${svc} "; then @@ -174,6 +188,15 @@ for svc in "${REQUIRED_SERVICES[@]}"; do fi done +ACTIVE_SERVICES=("${REQUIRED_SERVICES[@]}") +for svc in "${OPTIONAL_SERVICES[@]}"; do + if echo "$CONFIGURED_SERVICES" | grep -q "^${svc} "; then + ACTIVE_SERVICES+=("$svc") + else + echo "==> Optional pinning service '${svc}' is not configured; skipping." + fi +done + # --------------------------------------------------------------------------- # Add to IPFS # --------------------------------------------------------------------------- @@ -201,7 +224,7 @@ ipfs pin add "$CID" >/dev/null FAILED_PINS=() -for svc in "${REQUIRED_SERVICES[@]}"; do +for svc in "${ACTIVE_SERVICES[@]}"; do # Skip services where this CID is already queued/pinning/pinned — # Pinata rejects duplicate `pin remote add` calls with # DUPLICATE_OBJECT (400), and resubmitting on Filebase just makes a @@ -248,7 +271,7 @@ if [[ -n "$TESTING_DATA_REPO" ]]; then echo "WARNING: ${PIN_NAME} is ${FILE_SIZE_BYTES} bytes (> 50 MB)." 
>&2 echo " GitHub rejects pushes containing files > 50 MB, so it" >&2 echo " will NOT be mirrored to ITKTestingData." >&2 - echo " IPFS pin (local + itk-pinata + itk-filebase) succeeded;" >&2 + echo " IPFS pin (local + ${ACTIVE_SERVICES[*]}) succeeded;" >&2 echo " the .cid content link will still be produced." >&2 else MIRROR_DIR="$TESTING_DATA_REPO/CID" From 05be1889540997f97f731f8e56eac587e17438dd Mon Sep 17 00:00:00 2001 From: Matt McCormick Date: Fri, 1 May 2026 16:45:38 -0400 Subject: [PATCH 7/7] ENH: Replace Kubo + PSA workflow with Filebase S3 + ipfs-car MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Drops the local Kubo / IPFS-Desktop daemon, the `ipfs config profile apply unixfs-v1-2025` setup step, the `ipfs pin remote service add` PSA registrations (`itk-filebase`, `itk-pinata`), and the bash upload trio (`ipfs-upload.sh`, `content-link-normalize.sh`, `ipfs-pin-all.sh`) that drove them. The new contributor flow is pure Python on top of a small pixi environment: 1. `npx ipfs-car pack --no-wrap` builds a CARv1 locally. ipfs-car v1+ defaults (1 MiB chunks, 1024 children, raw leaves, CIDv1) match the unixfs-v1-2025 / IPIP-0499 profile, so no extra flags are needed to produce a reproducible CID. 2. `boto3` PUTs the CAR to a Filebase IPFS bucket through Filebase's S3-compatible REST API with `x-amz-meta-import: car`. Filebase imports the CAR server-side and exposes the resulting CID via `head_object` metadata. 3. The local CID and the CID Filebase reports are compared, and on success the file is replaced with `.cid`, the manifest at `Testing/Data/content-links.manifest` is updated, and the optional `--testing-data-repo` mirror step still copies the bytes into a local ITKTestingData clone (subject to the same 50 MB GitHub push limit as before). Concretely: - Add `boto3`, `nodejs`, and `requests` to a new `[tool.pixi.feature.external-data-upload]` feature plus an `external-data-upload` environment in `pyproject.toml`. 
Run `pixi install -e external-data-upload` once, then `pixi run -e external-data-upload python ...` for every upload. - New `Utilities/Maintenance/ExternalDataUpload/upload.py` is the single-file uploader: input validation (in-repo, no whitespace, not already a content link), CAR build, boto3 put_object with the `import: car` metadata header, head_object CID round-trip, manifest update, optional ITKTestingData mirror, and the same `git rm` / `git add` instructions as before. - New `Utilities/Maintenance/ExternalDataUpload/normalize.py` parses `ExternalData_URL_TEMPLATES` from `CMake/ITKExternalData.cmake` with a paren-aware scanner (the `%(hash)` / `%(algo)` substrings break naive `re.DOTALL` lazy matching), fetches each `.md5` / `.shaNNN` / `.cid` link via the gateway templates, verifies the bytes algorithmically (or via the `/ipfs/` server-side guarantee for CID links), and re-uploads through `upload.upload_file_to_filebase`. - `Utilities/Maintenance/ExternalDataUpload/README.md` is rewritten end to end: pixi setup, Filebase S3-key creation, `FILEBASE_ACCESS_KEY` / `FILEBASE_SECRET_KEY` / `FILEBASE_BUCKET` env-var contract, new troubleshooting section (missing npx, missing credentials, Filebase did not return a CID, CID mismatch). - `Utilities/Maintenance/ExternalDataUpload/SKILL.md` updated to describe the same flow for the AI agent: pixi env + Filebase credentials prerequisites; no Kubo, no PSA service registration. - `Documentation/docs/contributing/upload_binary_data.md` and `Documentation/docs/contributing/data.md` rewrite the one-time-setup, upload-a-file, mirror, and normalize sections around the pixi + Filebase workflow. The storage-locations list and testing-data-figure caption are reworded so Filebase appears as the upload destination and Kubo / Pinata only show up as build-time read paths (gateways, not pinning targets). - `Testing/Data/content-links.manifest` header rewritten to credit `upload.py` as the maintainer (previously named `ipfs-upload.sh`). 
The Filebase free tier supports the S3 import-as-CAR path used here, so the workflow needs no paid subscription — addressing the original Pinata \`PAID_FEATURE_ONLY\` blocker reported by @hjmjohnson — and CI runners can use the same env-var contract via GitHub Actions secrets. --- Documentation/docs/contributing/data.md | 35 +- .../docs/contributing/upload_binary_data.md | 154 +++--- Testing/Data/content-links.manifest | 8 +- .../Maintenance/ExternalDataUpload/README.md | 327 +++++-------- .../Maintenance/ExternalDataUpload/SKILL.md | 90 ++-- .../content-link-normalize.sh | 456 ------------------ .../ExternalDataUpload/ipfs-pin-all.sh | 178 ------- .../ExternalDataUpload/ipfs-upload.sh | 359 -------------- .../ExternalDataUpload/normalize.py | 319 ++++++++++++ .../Maintenance/ExternalDataUpload/upload.py | 341 +++++++++++++ pyproject.toml | 6 + 11 files changed, 914 insertions(+), 1359 deletions(-) delete mode 100755 Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh delete mode 100755 Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh delete mode 100755 Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh create mode 100755 Utilities/Maintenance/ExternalDataUpload/normalize.py create mode 100755 Utilities/Maintenance/ExternalDataUpload/upload.py diff --git a/Documentation/docs/contributing/data.md b/Documentation/docs/contributing/data.md index b1b2c9d3aa8..adab0e5e701 100644 --- a/Documentation/docs/contributing/data.md +++ b/Documentation/docs/contributing/data.md @@ -46,29 +46,31 @@ this example, with the [content-link-upload] web app. This app will upload the data to IPFS and provide a *.cid* CMake ExternalData content link file to download. This is the easiest and recommended way to upload new test data. 
-For advanced command line driven uploads, the upload script at -`Utilities/Maintenance/ExternalDataUpload/` can be used: +For command-line uploads, run the Python helper at +`Utilities/Maintenance/ExternalDataUpload/upload.py` from the +`external-data-upload` pixi environment: ```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/upload.py \ Modules/.../test/Baseline/MyTest.png ``` -The script adds the file to your local IPFS node under the UnixFS v1 2025 -profile, pins it on the `itk-filebase` remote pinning service (and on -`itk-pinata` when that service is configured — Pinata is optional because -its pin-by-CID endpoint requires a paid plan), and replaces the original -file with `MyTest.png.cid` containing the resulting CID. The CID and -source-tree path are also recorded in -`Testing/Data/content-links.manifest`. +The helper packs the file into a CARv1 with `npx ipfs-car` (defaults +match the unixfs-v1-2025 / IPIP-0499 profile so CIDs are reproducible), +uploads the CAR to your [Filebase] IPFS bucket via Filebase's S3-compatible +REST API, verifies the CID Filebase reports back matches what was computed +locally, and replaces the original file with `MyTest.png.cid` containing +that CID. The CID and source-tree path are also recorded in +`Testing/Data/content-links.manifest`. A local IPFS daemon is **not** +required. -For advanced CLI usage, first-time users must complete the one-time Kubo + pinning-service setup +First-time CLI users must complete the one-time pixi + Filebase setup documented in -[`Utilities/Maintenance/ExternalDataUpload/README.md`] before the script will -succeed. Contributors who cannot run a local Kubo daemon may instead use -the [content-link-upload] web app, which pins to [Filebase] and [Pinata] and returns -a `.cid` file directly — manifest and mirror updates must then be added by -hand. 
+[`Utilities/Maintenance/ExternalDataUpload/README.md`] before the helper +will succeed. Contributors who prefer not to run any local tooling can +instead use the [content-link-upload] web app, which returns a `.cid` +file directly — manifest and mirror updates must then be added by hand. For more details, see the description and procedures in [Upload Binary Data]. @@ -172,4 +174,3 @@ the [InterPlanetary File System (IPFS)]. [Upload Binary Data]: upload_binary_data.md [`Utilities/Maintenance/ExternalDataUpload/README.md`]: https://github.com/InsightSoftwareConsortium/ITK/blob/main/Utilities/Maintenance/ExternalDataUpload/README.md [Filebase]: https://filebase.com/ -[Pinata]: https://pinata.cloud/ diff --git a/Documentation/docs/contributing/upload_binary_data.md b/Documentation/docs/contributing/upload_binary_data.md index 9e3f7a46be2..fa08fe7978b 100644 --- a/Documentation/docs/contributing/upload_binary_data.md +++ b/Documentation/docs/contributing/upload_binary_data.md @@ -33,30 +33,25 @@ adopting Web3, we gain: - **Scalability** - **Sustainability** -Contributors to the ITK upload their data through a simple web app -that utilizes an easy-to-use, permissionless service, [Pinata]. +Contributors upload their data by running a small Python helper that packs +the file into a [CARv1] using `npx ipfs-car`, uploads the CAR to a [Filebase] +IPFS bucket through Filebase's S3-compatible REST API, records the resulting +CID in a manifest, and (optionally) mirrors the bytes into the [ITKTestingData] +GitHub Pages repository. A local [Kubo] daemon, IPFS Desktop, or any +`ipfs pin remote` PSA service is **not** required. See +[`Utilities/Maintenance/ExternalDataUpload/README.md`] for the one-time +developer setup and full workflow. -Data used in the ITK Git repository is periodically tracked in a -dedicated DataLad repository, the [ITKData DataLad repository]. 
-and stored across redundant locations so it can be retrieved from any of -the following: - -Contributors upload their data by running a small shell script that pushes -the file into [IPFS] via a local [Kubo] daemon, pins it on redundant -community-run pinning services, records the resulting CID in a manifest, and -(optionally) mirrors the bytes into the [ITKTestingData] GitHub Pages -repository. See [`Utilities/Maintenance/ExternalDataUpload/README.md`] for -the one-time developer setup and full workflow. +[CARv1]: https://ipld.io/specs/transport/car/carv1/ Data referenced from the ITK Git repository is stored across redundant locations so it can be retrieved from any of the following at build time: -- Local [Kubo] gateway (typically `127.0.0.1:8080`) +- [Filebase] IPFS gateway (where uploads land) - [ITKTestingData] GitHub Pages mirror -- [Filebase] (community pinning service, remote name `itk-filebase`) -- [Pinata] (community pinning service, remote name `itk-pinata`, - *optional* — pin-by-CID requires a paid Pinata plan) -- Public IPFS HTTP gateways (`ipfs.io`, `dweb.link`, `cloudflare-ipfs.com`) +- Public IPFS HTTP gateways (`ipfs.io`, `dweb.link`, `cloudflare-ipfs.com`, + `gateway.pinata.cloud`) +- Local [Kubo] gateway (typically `127.0.0.1:8080`) when present - Kitware's Apache HTTP Server - Local `ExternalData_OBJECT_STORES` cache - Archive tarballs from GitHub Releases @@ -64,17 +59,19 @@ locations so it can be retrieved from any of the following at build time: ![ITK testing data figure](./itk-testing-data.png) -*Testing data workflow. Testing or example data is uploaded to IPFS via the -content-link-upload.itk.org web app. New content is added with the -`Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh` script, which -pushes the bytes to a local [Kubo] node and pins them on `itk-filebase` -(and `itk-pinata` if a paid Pinata plan is configured) for redundancy. 
The -resulting CID is written as a `.cid` content link in the ITK source tree -and recorded in `Testing/Data/content-links.manifest`. Files ≤ 50 MB can -additionally be mirrored into [ITKTestingData] for GitHub Pages CDN -delivery. At test time an ITK build can fetch the data from a local cache, -archive tarball, the Apache HTTP server, the GitHub Pages mirror, or any -of several IPFS HTTP gateways.* +*Testing data workflow. New content is added with the +`Utilities/Maintenance/ExternalDataUpload/upload.py` helper, which packs +the file into a CAR with `npx ipfs-car` (defaults match the +unixfs-v1-2025 / IPIP-0499 profile so CIDs are reproducible) and uploads +the CAR to a [Filebase] IPFS bucket via boto3 against Filebase's +S3-compatible API. The CID Filebase reports back from `head_object` is +verified against the locally computed CID, written as a `.cid` content +link in the ITK source tree, and recorded in +`Testing/Data/content-links.manifest`. Files ≤ 50 MB can additionally be +mirrored into [ITKTestingData] for GitHub Pages CDN delivery. At test +time an ITK build can fetch the data from a local cache, archive tarball, +the Apache HTTP server, the GitHub Pages mirror, or any of several public +IPFS HTTP gateways.* See also our [Data](data.md) guide for more information. @@ -104,52 +101,53 @@ need to be followed: Upload new testing data ----------------------- -### Web app - -The easiest, recommended way to upload data is The [Content Link Upload] browser interface. +### One-time setup -### CLI one-time setup +The upload workflow needs: -The upload workflow requires: +- The `external-data-upload` pixi environment installed + (`pixi install -e external-data-upload`). It provides Python 3, [boto3], + and Node.js (which makes `npx ipfs-car` available without a separate + global install). +- A [Filebase] IPFS bucket and an S3 access key for that bucket. 
Filebase's + free tier is sufficient — the upload uses the S3 import-as-CAR path, + not the legacy IPFS Pinning Service API. +- The credentials exported as environment variables before running the + helper: -- A local [Kubo] daemon (CLI or IPFS Desktop) with the **UnixFS v1 2025** - profile applied, so CIDs are reproducible across implementations - (`ipfs config profile apply unixfs-v1-2025`, Kubo ≥ 0.40.0). -- The [Filebase] remote pinning service registered as `itk-filebase`. This - is **required** — it works on Filebase's free tier. -- Optionally, the [Pinata] remote pinning service registered as - `itk-pinata`. Pinata's pin-by-CID endpoint requires a **paid plan** - (the free plan rejects PSA `pin remote add` with `PAID_FEATURE_ONLY`), - so configure this only if you have a paid Pinata account; the upload - script skips it with a notice when it isn't registered. +```bash +export FILEBASE_ACCESS_KEY=... +export FILEBASE_SECRET_KEY=... +export FILEBASE_BUCKET=itk-data +``` -The full step-by-step setup — installing Kubo, signing up with -[Filebase] (and optionally [Pinata]), and registering each service -as a remote — is documented in +The full step-by-step setup is documented in [`Utilities/Maintenance/ExternalDataUpload/README.md`]. Complete that one-time setup before proceeding. +[boto3]: https://boto3.amazonaws.com/ + ### Upload a file -From the ITK source tree, run the upload script with the path to the file +From the ITK source tree, run the upload helper with the path to the file you want to upload: ```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/upload.py \ Modules/.../test/Baseline/MyTest.png ``` -The script will: - -1. Add the file to IPFS with `--cid-version=1` under the UnixFS v1 2025 - profile, producing a deterministic CID. -2. 
Pin locally, then on `itk-filebase` (and on `itk-pinata` if it is - registered — otherwise the script logs a notice and continues). By - default the script waits until each remote reports `pinned`, which - surfaces failures immediately but can take minutes per file as the - remote fetches the content. For batch runs pass `--background` to - submit pins asynchronously and verify afterwards with - `ipfs pin remote ls --status=queued,pinning,pinned`. +The helper will: + +1. Pack the file into a CARv1 with `npx ipfs-car pack --no-wrap` — + ipfs-car v1+ defaults to 1 MiB chunks, 1024 children per node, raw + leaves, CIDv1, which is the unixfs-v1-2025 profile, so the CID is + reproducible across implementations. +2. PUT the CAR to your Filebase IPFS bucket with + `x-amz-meta-import: car` so Filebase imports it server-side, then + read the imported CID back via `head_object` and verify it matches + the locally computed CID. 3. Replace `MyTest.png` in the source tree with `MyTest.png.cid` — a one-line text file containing the CID. 4. Append the CID and source-tree path to @@ -162,7 +160,8 @@ Pass `--testing-data-repo ` to additionally copy the file into a local clone of [ITKTestingData] at `CID/`: ```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/upload.py \ --testing-data-repo ~/src/ITKTestingData \ Modules/.../test/Baseline/MyTest.png ``` @@ -172,40 +171,40 @@ This populates the GitHub Pages mirror gateway already listed in [`CMake/ITKExternalData.cmake`]. Commit and push in the `ITKTestingData` repo to publish. Files larger than **50 MB** are skipped for the mirror step only (GitHub rejects pushes containing -files over 50 MB per file) — IPFS pinning on `itk-filebase` (and on -`itk-pinata` when configured) still proceeds for those files. +files over 50 MB per file) — the Filebase upload still proceeds for +those files. 
### Alternative: upload via the web app -Contributors who prefer not to run a local [Kubo] daemon can upload a file +Contributors who prefer not to run any local tooling can upload a file through the [Content Link Upload] web app ([Alt Link]). The app pins the -file on [web3.storage] and returns the corresponding `.cid` content link -to download. The resulting CID is usable anywhere the script-produced CID -would be — but the manifest entry and the optional [ITKTestingData] -mirror must then be added by hand. The script-based workflow above is -preferred when available because it also updates -`Testing/Data/content-links.manifest` and pins on the ITK community -services in one step. +file and returns the corresponding `.cid` content link to download. The +resulting CID is usable anywhere the helper-produced CID would be — but +the manifest entry and the optional [ITKTestingData] mirror must then be +added by hand. The helper above is preferred when available because it +also updates `Testing/Data/content-links.manifest` in one step. ### Normalize existing content links Older `.md5` / `.sha256` / `.sha512` content links can be converted to -`.cid`, and existing `.cid` links can be regenerated under the UnixFS -v1 2025 profile, with: +`.cid`, and existing `.cid` links can be regenerated under the +unixfs-v1-2025 profile, with: ```bash -Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/normalize.py ``` See [`Utilities/Maintenance/ExternalDataUpload/README.md`] for the full -set of options (`--dry-run`, `--hash-only`, `--cid-only`, `--background`, -`--testing-data-repo`). +set of options (`--dry-run`, `--hash-only`, `--cid-only`, +`--testing-data-repo`, `--bucket`). 
### Add the content link to the source tree -The upload script prints the exact commands to stage: +The upload helper prints the exact commands to stage: ```bash +git rm path/to/MyTest.png git add path/to/MyTest.png.cid git add Testing/Data/content-links.manifest git commit @@ -233,6 +232,5 @@ reachable gateway in [`CMake/ITKExternalData.cmake`]. [ITKTestingData]: https://github.com/InsightSoftwareConsortium/ITKTestingData [Kubo]: https://github.com/ipfs/kubo [multiformats]: https://multiformats.io/ -[Pinata]: https://pinata.cloud/ [solution to this problem]: https://blog.kitware.com/cmake-externaldata-using-large-files-with-distributed-version-control/ [`Utilities/Maintenance/ExternalDataUpload/README.md`]: https://github.com/InsightSoftwareConsortium/ITK/blob/main/Utilities/Maintenance/ExternalDataUpload/README.md diff --git a/Testing/Data/content-links.manifest b/Testing/Data/content-links.manifest index 10e3f127b37..62dd0827cb9 100644 --- a/Testing/Data/content-links.manifest +++ b/Testing/Data/content-links.manifest @@ -3,11 +3,9 @@ # One entry per line, format: # # Maintained automatically by -# Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh -# and used by -# Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh -# to batch-pin every CID on the local Kubo node and every configured -# remote pinning service (itk-pinata, itk-filebase, ...). +# Utilities/Maintenance/ExternalDataUpload/upload.py +# which packs each file into a CARv1 (unixfs-v1-2025 profile) and uploads +# the CAR to a Filebase IPFS bucket via boto3. # # Paths must not contain whitespace (the manifest uses a single space as # the field delimiter). 
Data lines are kept sorted by path; comment lines diff --git a/Utilities/Maintenance/ExternalDataUpload/README.md b/Utilities/Maintenance/ExternalDataUpload/README.md index c7175fcf514..7c670b97c60 100644 --- a/Utilities/Maintenance/ExternalDataUpload/README.md +++ b/Utilities/Maintenance/ExternalDataUpload/README.md @@ -1,6 +1,7 @@ # ITK External Data Upload -Upload large test images and baselines to IPFS, optionally mirror them into the +Upload large test images and baselines to [Filebase] IPFS storage, optionally +mirror them into the [`ITKTestingData`](https://github.com/InsightSoftwareConsortium/ITKTestingData) repository, and replace the original with a lightweight `.cid` content link committed to the ITK source tree. @@ -11,153 +12,99 @@ which fetches content at test configure time from the gateways listed there gateway, `ipfs.io`, `gateway.pinata.cloud`, `cloudflare-ipfs.com`, `dweb.link`). -## One-Time Developer Setup - -### 1. Install Kubo (IPFS) - -You need the Kubo IPFS implementation. Choose one method: - -**IPFS Desktop** (recommended — bundles the Kubo daemon with a GUI, with a -system-tray icon, peer/bandwidth statistics, a file browser for your MFS, and -one-click start/stop): - -Download from . IPFS Desktop -auto-starts the daemon on login and exposes the same HTTP API that the `ipfs` -CLI uses (default `127.0.0.1:5001`), so every command in this guide works -identically whether you started the daemon from the command line or from the -tray. - -**CLI only** (macOS): - -```bash -brew install ipfs -``` - -**CLI only** (Linux): - -Download the latest release from , then: - -```bash -tar xvfz kubo_*_linux-amd64.tar.gz -cd kubo && sudo bash install.sh -``` +## How the upload works -After installation, verify `ipfs` is on your PATH: +Uploads go directly to a Filebase IPFS bucket over Filebase's +S3-compatible REST API. 
A local +[Kubo](https://github.com/ipfs/kubo) daemon, IPFS Desktop, or any +configured `ipfs pin remote` PSA service is **not** required. -```bash -ipfs --version -``` +For each upload the helper script: -### 2. Initialize and Start the Daemon +1. Packs the file into a CARv1 with `npx ipfs-car pack --no-wrap`. ipfs-car + v1+ defaults to **1 MiB chunks, 1024 children per node, raw leaves, + CIDv1**, which is exactly the [unixfs-v1-2025] / IPIP-0499 profile, so + the CID is reproducible across implementations and matches what other + contributors and CI compute for the same content. +2. PUTs the CAR to the configured Filebase bucket with the + `x-amz-meta-import: car` header. Filebase imports the CAR and pins the + resulting CID server-side, exposing it via `head_object` metadata. +3. Reads the CID back from `head_object` and verifies it matches the local + CID. A mismatch aborts the upload. +4. Writes `.cid`, removes the original file, appends/updates an entry + in `Testing/Data/content-links.manifest`, and (with + `--testing-data-repo`) copies the bytes into a local `ITKTestingData` + clone for the GitHub Pages CDN mirror. -```bash -# One-time initialization (creates ~/.ipfs) -ipfs init +[unixfs-v1-2025]: https://github.com/ipfs/specs/blob/main/IPIP/0499-unixfs-v1-2025-profile.md -# Start the daemon (keep running in a separate terminal, or use IPFS Desktop) -ipfs daemon -``` +## One-Time Developer Setup -### 2a. Apply the UnixFS v1 2025 Profile +### 1. Install the pixi environment -Requires **Kubo v0.40.0 or later**. Apply once per node, before your first -upload: +The upload helpers run on top of a small pixi environment that brings in +[boto3] for the Filebase S3 calls, Node.js for `npx ipfs-car`, and +`requests` for the gateway-fetch verification path used by `normalize.py`. 
+From the ITK source tree: ```bash -ipfs config profile apply unixfs-v1-2025 +pixi install -e external-data-upload ``` -This pins the UnixFS importer settings (chunker, layout, raw-leaves, HAMT -directory thresholds) to standardized values for reproducible CIDs. Without it, -`ipfs add` defaults may drift across Kubo patch releases and across -implementations (Helia, rust-ipfs, boxo), so two contributors uploading the -same file can produce different CIDs — which breaks the `.cid` content-link -contract ITK relies on. - -The profile applies to **new adds only**; existing pinned content and -already-committed `.cid` files are unaffected. - -References: +[boto3]: https://boto3.amazonaws.com/ -- [Kubo v0.40.0 release notes](https://github.com/ipfs/kubo/releases/tag/v0.40.0) -- [Reproducible CIDs — IPFS blog, March 2026](https://blog.ipfs.tech/2026-03-reproducible-cids/) - -### 3. Configure Remote Pinning Services - -The upload script pins content on community-run remote services for -redundancy alongside the GitHub Pages mirror, matching the gateways -declared in `CMake/ITKExternalData.cmake`. Use the **exact service names** -`itk-filebase` (required) and `itk-pinata` (optional) — the upload script -looks up those names. - -#### Filebase (service name: `itk-filebase`, **required**) - -Filebase's IPFS Pinning Service endpoint accepts pin-by-CID on the free -tier, so this is the baseline pinning provider for ITK. - -1. Sign up at -2. Create an **IPFS bucket** at -3. Go to , select your IPFS bucket in the - "IPFS Pinning Service API Endpoint" section, and copy the generated token -4. Add the service: +That installs everything into `.pixi/envs/external-data-upload/`. 
Verify: ```bash -printf "Filebase token: " && read -rs FILEBASE_TOKEN && echo -ipfs pin remote service add itk-filebase https://api.filebase.io/v1/ipfs "$FILEBASE_TOKEN" +pixi run -e external-data-upload python --version +pixi run -e external-data-upload node --version +pixi run -e external-data-upload npx --yes ipfs-car --version ``` -5. Verify: +The first `npx ipfs-car` invocation downloads the package into the npm +cache; subsequent runs are offline. -```bash -ipfs pin remote service ls -# Should show: itk-filebase https://api.filebase.io/v1/ipfs -``` +### 2. Create a Filebase IPFS bucket and S3 keys -#### Pinata (service name: `itk-pinata`, **optional — paid plan**) +1. Sign up at (the free tier supports + pin-by-CID via the S3 import path). +2. Create an **IPFS bucket** at . + The bucket name is local to your account — the published CID is the + only thing other contributors need to retrieve the bytes. +3. Create an S3 access key for that bucket at + . Filebase ties keys to a single + bucket, so the access key + secret you receive can only see and + write to that bucket. -Pinata's `pin remote add` endpoint (the IPFS Pinning Service API) is -restricted to **paid plans** — the free plan rejects pin-by-CID with -`PAID_FEATURE_ONLY` (HTTP 403). Configure this service only if you have a -paid Pinata plan; otherwise leave it out and the upload script will skip -it with an informational message. Filebase + the GitHub Pages mirror still -provide redundancy. +### 3. Export the credentials -1. Sign up at and select a paid plan that includes - pin-by-CID -2. Create an API key at - - Enable **pinByHash** and **pinFileToIPFS** permissions -3. Copy the JWT token and add the service (use a prompt to avoid leaking - the token into shell history): +The helper scripts read three environment variables: ```bash -printf "Pinata JWT: " && read -rs PINATA_JWT && echo -ipfs pin remote service add itk-pinata https://api.pinata.cloud/psa "$PINATA_JWT" +export FILEBASE_ACCESS_KEY=... 
# S3 access key +export FILEBASE_SECRET_KEY=... # S3 secret key +export FILEBASE_BUCKET=itk-data # bucket name from step 2 ``` -4. Verify: - -```bash -ipfs pin remote service ls -# Should show: itk-pinata https://api.pinata.cloud/psa -``` +Add the exports to your shell profile or a `.env` file you source before +uploads. **Do not** commit credentials to the repository. ## Usage ### Upload a single file ```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/upload.py ``` The script will: -1. Add the file to IPFS with `--cid-version=1` (UnixFS v1 2025 profile) -2. Pin it locally -3. Pin it on `itk-filebase` (and on `itk-pinata` if that service is - configured; otherwise the script logs a notice and continues) -4. Replace the original file with `.cid` containing the CID -5. Append/update an entry in `Testing/Data/content-links.manifest` -6. Print the `git rm` / `git add` commands to stage the change +1. Pack the file into a CAR (CIDv1, unixfs-v1-2025 profile) +2. Upload the CAR to your Filebase IPFS bucket and verify the CID +3. Replace the original file with `.cid` containing the CID +4. Append/update an entry in `Testing/Data/content-links.manifest` +5. Print the `git rm` / `git add` commands to stage the change ### Also mirror the bytes to `ITKTestingData` @@ -169,7 +116,8 @@ at `CID/` and `git add` it there. This populates the gateway already listed in `CMake/ITKExternalData.cmake`. ```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/upload.py \ --testing-data-repo ~/src/ITKTestingData \ Testing/Data/Input/brain.nii.gz ``` @@ -177,99 +125,56 @@ Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ **GitHub 50 MB file size limit.** `ITKTestingData` is hosted on GitHub, which hard-rejects pushes containing files larger than **50 MB** per file. 
The upload script checks the file size before mirroring and refuses to copy files over -50 MB into the `ITKTestingData` tree. IPFS pinning (local + `itk-filebase`, -plus `itk-pinata` when configured) still proceeds for oversized files — the -mirror step is the only one that gets skipped, with a clear warning. +50 MB into the `ITKTestingData` tree. The Filebase upload still proceeds for +oversized files — the mirror step is the only one that gets skipped, with a +clear warning. Commit the staged `CID/` file in `ITKTestingData` and push; the `gh-pages` workflow on that repo republishes the new file at the GitHub Pages mirror gateway. -### Batch-pin every CID in the manifest - -```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh -``` - -Reads `Testing/Data/content-links.manifest` and pins every CID locally plus on -every configured remote pinning service. Useful for: - -- Bootstrapping a new local Kubo node with all ITK test content -- Re-pinning everything after rotating a pinning provider -- Verifying all pinned content is still reachable - -Use `--background` to queue remote pins asynchronously (the remote services -then fetch the content themselves): - -```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh --background -``` - ### Normalize existing content links to CID `.md5` / `.sha256` / `.sha512` content links can be converted to `.cid`, and -existing `.cid` links can be regenerated under the UnixFS v1 2025 profile (in +existing `.cid` links can be regenerated under the unixfs-v1-2025 profile (in case they were originally produced with older chunker defaults). ```bash -Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/normalize.py ``` The script will, for each content link under the given path: -1. 
Fetch the bytes through the gateways in `CMake/ITKExternalData.cmake` (same - order the build uses, so a gateway CI can't reach is a gateway this script - won't accept). +1. Fetch the bytes through the gateways in `CMake/ITKExternalData.cmake` + (same order the build uses, so a gateway CI can't reach is a gateway + this script won't accept). 2. Verify the fetched bytes against the declared hash (for `.md5` / `.shaNNN` - links) or the declared CID (for `.cid` links). If verification fails the - link is left untouched and reported. -3. Re-materialize the actual file next to the content link, then invoke - `ipfs-upload.sh` on it so the new CID is produced under the UnixFS v1 2025 - profile, pinned locally and on `itk-filebase` (and `itk-pinata` if - configured), and (if `--testing-data-repo` is passed) mirrored into - `ITKTestingData`. The old `.md5` / `.sha256` / `.sha512` link is removed; - a `.cid` link is written in its place. + links) or the declared CID (for `.cid` links — accepted only when fetched + via an IPFS HTTP gateway, which verifies server-side). +3. Re-materialize the actual file next to the content link, then call the + Filebase uploader so the new CID is produced under the unixfs-v1-2025 + profile and (if `--testing-data-repo` is passed) mirrored into + `ITKTestingData`. The old `.md5` / `.sha256` / `.sha512` link is + removed; a `.cid` link is written in its place. Common options: ```bash # Dry run — report what would change, modify nothing. -content-link-normalize.sh Modules/Filtering/Foo --dry-run +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/normalize.py Modules/Filtering/Foo --dry-run # Also mirror bytes into a local ITKTestingData checkout. 
-content-link-normalize.sh Testing/Data/Input --testing-data-repo ~/src/ITKTestingData +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/normalize.py Testing/Data/Input \ + --testing-data-repo ~/src/ITKTestingData # Only process files that are currently .md5 / .shaNNN (skip existing .cid). -content-link-normalize.sh Modules --hash-only - -# Batch run with asynchronous remote pinning (returns without waiting for -# each remote to reach 'pinned'). Verify afterwards with `ipfs pin remote ls`. -content-link-normalize.sh Modules --hash-only --background +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/normalize.py Modules --hash-only ``` -### Synchronous vs. asynchronous remote pinning - -Both `ipfs-upload.sh` and `content-link-normalize.sh` default to -**synchronous** remote pinning: `ipfs pin remote add` blocks until the -remote reports `pinned`, which surfaces failures immediately and is -safest for one-off uploads. Remote fetch can take minutes per file, -however, which is impractical for batch runs. - -Pass `--background` to submit pin requests asynchronously — the remote -queues the pin and fetches the content itself, and the script returns -right away. Check final pin state with: - -```bash -ipfs pin remote ls --service=itk-filebase --status=queued,pinning,pinned -# Only when itk-pinata is configured (paid Pinata plan): -ipfs pin remote ls --service=itk-pinata --status=queued,pinning,pinned -``` - -Both scripts also pre-check each remote for an existing pin on the same -CID and skip the `pin remote add` call if one is already queued, pinning, -or pinned — this prevents `DUPLICATE_OBJECT` (400) errors on Pinata when -re-running on already-uploaded content. 
- ## Content Link Manifest `Testing/Data/content-links.manifest` is a plain-text index of every CID the @@ -291,9 +196,9 @@ Rules: - `` is a repo-relative path and **must not contain whitespace** — the manifest uses a single space as the field delimiter. Rename files with spaces before uploading. -- `ipfs-upload.sh` maintains this file automatically: entries are added on - first upload and replaced on re-upload. The data lines are sorted by path - for a minimal review diff; comment lines at the top are preserved. +- `upload.py` maintains this file automatically: entries are added on first + upload and replaced on re-upload. The data lines are sorted by path for a + minimal review diff; comment lines at the top are preserved. - The manifest should be committed alongside the `.cid` files the upload produced. @@ -322,45 +227,31 @@ CID, and the cache lookup misses. ## Troubleshooting -### `ipfs command not found on PATH` +### `ERROR: 'npx' not found on PATH` -Install Kubo (see step 1 above). If using IPFS Desktop on macOS, the app -installs `/usr/local/bin/ipfs` automatically; on Linux, IPFS Desktop does not -install a CLI symlink, so either add Kubo separately or point your shell at -the bundled binary inside the AppImage. +The pixi environment is not active. Run the helpers via `pixi run -e +external-data-upload python ...`, or activate the environment first with +`pixi shell -e external-data-upload`. -### `IPFS daemon does not appear to be running` +### `ERROR: Missing Filebase credentials` -Start the daemon: `ipfs daemon` in a separate terminal, or launch IPFS -Desktop. The script tests the connection with `ipfs swarm peers`, which -requires an active daemon. +Export `FILEBASE_ACCESS_KEY`, `FILEBASE_SECRET_KEY`, and `FILEBASE_BUCKET` +(or pass `--bucket`) before running the upload script. See setup step 3. -### `Required pinning service 'itk-filebase' is not configured` - -Run `ipfs pin remote service ls` to see configured services. 
Re-add with the -commands in step 3 above. Tokens may have expired if you revoked the API key. -The script intentionally refuses to upload if `itk-filebase` is missing — it -is the baseline pin provider that works on the free tier. `itk-pinata` is -optional (paid plan only); if it isn't registered the script prints a -notice and continues. - -### `PAID_FEATURE_ONLY` / `403 Forbidden` from Pinata - -Pinata's free plan no longer accepts pin-by-CID via the IPFS Pinning Service -API; their `pin remote add` endpoint is gated to paid plans. If you don't -have a paid plan, remove the service so the upload script skips it cleanly: - -```bash -ipfs pin remote service rm itk-pinata -``` +### `Filebase did not return a CID for ...` -Filebase + the GitHub Pages mirror still provide redundancy. +The CAR was uploaded but Filebase did not import it. Common causes: -### Remote pin failed +- The bucket is a regular S3 bucket, not an **IPFS** bucket — recreate at + . +- The S3 access key is read-only or scoped to a different bucket. +- Filebase rate-limited the request — retry after a few seconds. -The script prints retry commands for any failed pins. Common causes: +### `CID mismatch: local=... filebase=...` -- **Expired API token** — regenerate at the service dashboard -- **Rate limiting** — wait a moment and retry -- **Large file timeout** — the file may take time to transfer; retry the - printed `ipfs pin remote add` command manually +The CID this client computed (via `npx ipfs-car`) and the CID Filebase +reported after import disagree. This indicates a chunker/profile drift +between the local ipfs-car version and Filebase's importer. Confirm +`pixi run -e external-data-upload npx ipfs-car --version` is v1 or newer, +then retry; if the mismatch persists, file an issue and include both CIDs +in the report. 
diff --git a/Utilities/Maintenance/ExternalDataUpload/SKILL.md b/Utilities/Maintenance/ExternalDataUpload/SKILL.md index 78d983ebf9e..f90bb573430 100644 --- a/Utilities/Maintenance/ExternalDataUpload/SKILL.md +++ b/Utilities/Maintenance/ExternalDataUpload/SKILL.md @@ -1,14 +1,13 @@ --- name: external-data-upload description: > - Upload ITK test data to IPFS and produce .cid content links, pin on - itk-filebase (and itk-pinata if configured — Pinata is optional because - pin-by-CID requires a paid plan there), optionally mirror into - ITKTestingData, and normalize existing .md5 / .sha256 / .cid content - links. Use when the - user wants to add test images, baseline data, or model files under - Testing/Data/ or a module's data/ directory, or when asked to convert - hash-based content links to CID. + Upload ITK test data to Filebase IPFS storage and produce .cid content + links via the S3 REST API + npx ipfs-car (no Kubo daemon required), + optionally mirror into ITKTestingData, and normalize existing + .md5 / .sha256 / .cid content links. Use when the user wants to add + test images, baseline data, or model files under Testing/Data/ or a + module's data/ directory, or when asked to convert hash-based content + links to CID. allowed-tools: - Bash - Read @@ -16,75 +15,73 @@ allowed-tools: # ITK External Data Upload -Upload a file to IPFS and replace it with a `.cid` content link, maintain the -`Testing/Data/content-links.manifest`, and (optionally) mirror the bytes into -`ITKTestingData` for the GitHub Pages gateway. Also: regenerate existing -`.md5` / `.sha256` / `.cid` content links under the UnixFS v1 2025 profile. +Upload a file to Filebase IPFS storage and replace it with a `.cid` content +link, maintain `Testing/Data/content-links.manifest`, and (optionally) mirror +the bytes into `ITKTestingData` for the GitHub Pages gateway. Also: regenerate +existing `.md5` / `.sha256` / `.cid` content links under the unixfs-v1-2025 +profile. 
## Prerequisites -The developer must have IPFS and pinning services configured. If not, direct -them to [`README.md`](./README.md) in this directory. +The developer must have the `external-data-upload` pixi environment installed +and Filebase credentials exported. If not, direct them to +[`README.md`](./README.md) in this directory. Required: -- IPFS daemon running (`ipfs daemon` or IPFS Desktop) -- UnixFS v1 2025 profile applied (`ipfs config profile apply unixfs-v1-2025`) -- `itk-filebase` remote pinning service configured (works on the free tier) +- pixi environment installed: `pixi install -e external-data-upload` +- Filebase IPFS bucket with an S3 access key +- Environment variables exported: `FILEBASE_ACCESS_KEY`, + `FILEBASE_SECRET_KEY`, `FILEBASE_BUCKET` -Optional: - -- `itk-pinata` remote pinning service configured. Pinata's pin-by-CID - endpoint is **paid-only** — the free plan rejects PSA `pin remote add` - with `PAID_FEATURE_ONLY` (403). The upload script skips this service - with a notice if it isn't registered. +A local Kubo daemon, IPFS Desktop, or any `ipfs pin remote` PSA service is +**not** required — the upload talks to Filebase's S3 REST API directly and +relies on `npx ipfs-car` (installed via Node.js in the pixi environment) for +local CAR construction. ## Tasks this skill handles ### 1. Upload a single file -Run the upload script: +Run the upload script via pixi: ```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/upload.py ``` If the user mentions `ITKTestingData` or asks you to mirror the bytes to GitHub Pages, pass `--testing-data-repo `: ```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh \ +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/upload.py \ --testing-data-repo \ ``` The script will: -1. Add to IPFS with `--cid-version=1` (UnixFS v1 2025 profile) -2. 
Pin locally and on `itk-filebase`; also on `itk-pinata` if registered - (skipped with a notice when only Filebase is configured) +1. Pack the file into a CARv1 with `npx ipfs-car pack --no-wrap` + (defaults match the unixfs-v1-2025 profile) +2. Upload the CAR to the Filebase IPFS bucket via boto3 with + `Metadata={"import": "car"}` and verify the CID returned by + `head_object` matches the local CID 3. If `--testing-data-repo` given and file ≤ 50 MB, copy to `/CID/` and `git add` it there. Files over 50 MB are skipped - for the mirror step only (GitHub rejects > 50 MB) — IPFS pinning still + for the mirror step only (GitHub rejects > 50 MB) — Filebase pinning still succeeds. 4. Replace the source file with `.cid` 5. Update `Testing/Data/content-links.manifest` -### 2. Pin every CID from the manifest - -```bash -Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh -``` - -Use for bootstrapping a new IPFS node or re-pinning after rotating a provider. - -### 3. Normalize existing content links +### 2. Normalize existing content links Use when the user wants to convert `.md5` / `.sha256` / `.sha512` links to -`.cid`, or re-generate `.cid` links under the UnixFS v1 2025 profile. +`.cid`, or re-generate `.cid` links under the unixfs-v1-2025 profile. ```bash -Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +pixi run -e external-data-upload python \ + Utilities/Maintenance/ExternalDataUpload/normalize.py ``` Useful options: @@ -92,16 +89,13 @@ Useful options: - `--dry-run` — report what would change - `--hash-only` — only touch `.md5` / `.shaNNN` links, leave `.cid` alone - `--cid-only` — only re-hash existing `.cid` links under the new profile -- `--testing-data-repo ` — forwarded to `ipfs-upload.sh` -- `--background` — forwarded to `ipfs-upload.sh`; submit remote pins - asynchronously instead of waiting for each to reach `pinned`. Use for - batch runs where synchronous pinning would take minutes per file. 
- Verify final state afterwards with `ipfs pin remote ls`. +- `--testing-data-repo ` — forwarded to the upload helper +- `--bucket ` — Filebase bucket override (default: `$FILEBASE_BUCKET`) The normalize script fetches bytes through the gateway templates in `CMake/ITKExternalData.cmake` (same order as the build), verifies them -against the declared hash or CID, and invokes `ipfs-upload.sh` to produce -the new `.cid`. +against the declared hash or CID, and calls `upload.upload_file_to_filebase` +to produce the new `.cid`. ## After Upload diff --git a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh b/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh deleted file mode 100755 index 6c49720b5b4..00000000000 --- a/Utilities/Maintenance/ExternalDataUpload/content-link-normalize.sh +++ /dev/null @@ -1,456 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Normalize ITK content links: convert .md5 / .shaNNN links to .cid and -# regenerate existing .cid links under the UnixFS v1 2025 profile. -# -# For each content link found, the script: -# 1. Fetches the bytes via the gateway templates declared in -# CMake/ITKExternalData.cmake (identical order to the build). -# 2. Verifies the bytes against the declared hash or CID. -# 3. Re-materialises the actual file alongside the link, then invokes -# ipfs-upload.sh on it so a fresh CID is produced under the UnixFS -# v1 2025 profile, pinned on itk-filebase (and on itk-pinata if -# configured — see ipfs-upload.sh), and (optionally) mirrored -# into ITKTestingData. -# -# Usage: -# content-link-normalize.sh [options] -# -# Options: -# --testing-data-repo Forwarded to ipfs-upload.sh. Local -# ITKTestingData clone to mirror bytes into. -# --background Forwarded to ipfs-upload.sh. Submit remote -# pin requests asynchronously; useful for -# batch runs where waiting for each pin to -# reach 'pinned' status (minutes per file) -# is impractical. 
Verify final pin state -# afterwards with `ipfs pin remote ls`. -# --dry-run List what would change without modifying. -# --hash-only Process only .md5 / .shaNNN links -# (leave existing .cid links alone). -# --cid-only Process only .cid links -# (re-hash under UnixFS v1 2025 profile). -# -h|--help Show this help. -# -# Exit codes: -# 0 — all content links normalized -# 1 — usage / environment error -# 2 — one or more links failed to fetch, verify, or re-upload - -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" -CMAKE_FILE="$REPO_ROOT/CMake/ITKExternalData.cmake" -UPLOAD_SCRIPT="$SCRIPT_DIR/ipfs-upload.sh" - -info() { printf '==> %s\n' "$*"; } -warn() { printf 'WARN: %s\n' "$*" >&2; } -die() { printf 'ERROR: %s\n' "$*" >&2; exit 1; } - -show_help() { - sed -n '3,/^$/{ s/^# \?//; p }' "$0" - exit 0 -} - -# --------------------------------------------------------------------------- -# Argument parsing -# --------------------------------------------------------------------------- - -TESTING_DATA_REPO="" -BACKGROUND=false -DRY_RUN=false -HASH_ONLY=false -CID_ONLY=false -TARGET="" - -while [[ $# -gt 0 ]]; do - case "$1" in - -h|--help) show_help ;; - --dry-run) DRY_RUN=true; shift ;; - --hash-only) HASH_ONLY=true; shift ;; - --cid-only) CID_ONLY=true; shift ;; - --background) BACKGROUND=true; shift ;; - --testing-data-repo) - TESTING_DATA_REPO="${2:?--testing-data-repo requires a path}" - shift 2 - ;; - --testing-data-repo=*) - TESTING_DATA_REPO="${1#--testing-data-repo=}" - shift - ;; - -*) die "Unknown option: $1" ;; - *) - [[ -z "$TARGET" ]] || die "Unexpected positional arg: $1" - TARGET="$1" - shift - ;; - esac -done - -[[ -n "$TARGET" ]] || die "Path or file required. 
Example: content-link-normalize.sh Testing/Data/Input" -[[ -e "$TARGET" ]] || die "Not found: $TARGET" -[[ -f "$CMAKE_FILE" ]] || die "Cannot find $CMAKE_FILE" -[[ -x "$UPLOAD_SCRIPT" ]] || die "Cannot find or execute $UPLOAD_SCRIPT" - -if $HASH_ONLY && $CID_ONLY; then - die "--hash-only and --cid-only are mutually exclusive" -fi - -# --------------------------------------------------------------------------- -# Prerequisites -# --------------------------------------------------------------------------- - -command -v curl >/dev/null 2>&1 || die "curl is required" -command -v ipfs >/dev/null 2>&1 || die "ipfs is required (for CID recomputation)" - -# Resolve the local command that computes a digest for an algorithm. -# Returns a command line (possibly multi-word, e.g. "shasum -a 256") whose -# first whitespace-delimited token on stdout is the hex digest. -# -# Prefers GNU coreutils (`md5sum`, `shaNNNsum`) when present; falls back to -# the BSD/macOS tools that ship by default on macOS: `md5 -r` (output format -# matches md5sum) and `shasum -a NNN`. On macOS you can install coreutils -# via `brew install coreutils` to get the `*sum` variants as well. -hash_cmd_for_ext() { - case "$1" in - md5) - if command -v md5sum >/dev/null 2>&1; then - echo "md5sum" - elif command -v md5 >/dev/null 2>&1; then - # BSD md5; -r prints ` ` like md5sum. - echo "md5 -r" - else - return 1 - fi - ;; - sha1|sha224|sha256|sha384|sha512) - if command -v "${1}sum" >/dev/null 2>&1; then - echo "${1}sum" - elif command -v shasum >/dev/null 2>&1; then - echo "shasum -a ${1#sha}" - else - return 1 - fi - ;; - *) - return 1 - ;; - esac -} - -# ITK content links in practice use `.md5` (legacy) or `.sha512` (current); -# other sha variants are supported for completeness but not pre-checked. 
-for alg in md5 sha512; do - hash_cmd_for_ext "$alg" >/dev/null 2>&1 \ - || warn "no tool available to compute ${alg}; any .${alg} content links will fail to verify" -done - -# --------------------------------------------------------------------------- -# Parse ExternalData_URL_TEMPLATES from CMake/ITKExternalData.cmake -# --------------------------------------------------------------------------- -# -# Matches the order in the .cmake file exactly. The block we want looks like: -# -# list( -# APPEND -# ExternalData_URL_TEMPLATES -# # comment -# "https://.../%(hash)" -# ... -# ) -# -# Strategy: join the whole file into one logical string, locate the -# `list(... ExternalData_URL_TEMPLATES ... )` invocation by matching -# balanced parentheses, then print every quoted template inside it that -# contains %(hash). - -readarray -t URL_TEMPLATES < <( - awk ' - BEGIN { depth = 0; in_block = 0 } - { - line = $0 - # Trim leading whitespace. - sub(/^[[:space:]]+/, "", line) - - # Enter the block when we see `list(` followed somewhere by - # `ExternalData_URL_TEMPLATES` at depth 1. We buffer tokens - # at depth 1 until we are sure. - if (!in_block && line ~ /^list[[:space:]]*\(/) { - pending_list = 1 - depth = 1 - next - } - if (pending_list) { - if (line ~ /ExternalData_URL_TEMPLATES/) { - in_block = 1 - pending_list = 0 - next - } - # Track depth so we know when the list(...) we rejected ends. 
- n_open = gsub(/\(/, "(", line) - n_close = gsub(/\)/, ")", line) - depth += n_open - n_close - if (depth <= 0) { pending_list = 0; depth = 0 } - next - } - - if (in_block) { - if (line ~ /^#/) next - if (line ~ /^\)/) { in_block = 0; depth = 0; next } - if (match(line, /"[^"]+"/)) { - tmpl = substr(line, RSTART + 1, RLENGTH - 2) - if (tmpl ~ /%\(hash\)/) print tmpl - } - } - } - ' "$CMAKE_FILE" -) - -if [[ ${#URL_TEMPLATES[@]} -eq 0 ]]; then - die "Failed to parse ExternalData_URL_TEMPLATES from $CMAKE_FILE" -fi - -info "Loaded ${#URL_TEMPLATES[@]} gateway template(s) from CMake/ITKExternalData.cmake" - -# --------------------------------------------------------------------------- -# Helpers -# --------------------------------------------------------------------------- - -# Algorithm mapping: extension -> uppercase algorithm name for %(algo). -# Matches CMake ExternalData.cmake's _ExternalData_link_content behaviour. -algo_uc_for_ext() { - case "$1" in - md5) echo "MD5" ;; - sha1) echo "SHA1" ;; - sha224) echo "SHA224" ;; - sha256) echo "SHA256" ;; - sha384) echo "SHA384" ;; - sha512) echo "SHA512" ;; - # CID uses a lowercase override in ITKExternalData.cmake - # (ExternalData_URL_ALGO_CID_lower = cid). - cid) echo "cid" ;; - *) return 1 ;; - esac -} - -# Substitute %(algo) / %(hash) in a URL template. -render_url() { - local template="$1" algo="$2" hash="$3" - local url="${template//%(algo)/$algo}" - url="${url//%(hash)/$hash}" - printf '%s\n' "$url" -} - -# Fetch a content link into a tempfile, verifying the bytes correspond to -# the declared digest or CID. Prints the path of the verified tempfile on -# success. -# -# For CID links: `ipfs cat` is the primary fetch path because the daemon -# verifies the returned bytes server-side against the requested CID. -# Public IPFS HTTP gateways (paths containing /ipfs/) also verify -# server-side, so fetches from those URLs are accepted without local -# recomputation. 
Local `ipfs add --only-hash` is NOT used for verification -# because it can produce a different CID from the stored one when the -# original upload used non-default chunker or hash parameters — chunker -# drift is exactly what the UnixFS v1 2025 profile is meant to fix, so a -# mismatch would be expected, not an error. -# -# For hash links (.md5, .shaNNN): non-IPFS gateways only serve bytes by -# name, so we recompute the digest locally and compare. -fetch_and_verify() { - local ext="$1" # cid / md5 / shaNNN - local value="$2" # the actual hash or CID - local out - out="$(mktemp -t itk-content-link.XXXXXX)" - - local algo_uc - algo_uc="$(algo_uc_for_ext "$ext")" || { - warn "Unknown content-link extension: .${ext}" - rm -f "$out" - return 1 - } - - # Fast path for .cid: fetch via the running daemon. Verification is - # implicit — the daemon refuses to return bytes that do not hash back - # to the CID. - if [[ "$ext" == "cid" ]]; then - if ipfs cat "$value" > "$out" 2>/dev/null && [[ -s "$out" ]]; then - printf '%s\n' "$out" - return 0 - fi - fi - - local template rendered - for template in "${URL_TEMPLATES[@]}"; do - rendered="$(render_url "$template" "$algo_uc" "$value")" - - # IPFS gateway templates (path contains /ipfs/) only make sense for CIDs. - if [[ "$ext" != "cid" && "$rendered" == *"/ipfs/"* ]]; then - continue - fi - - if ! curl -sfL --connect-timeout 10 --max-time 120 -o "$out" "$rendered"; then - continue - fi - - if verify_bytes "$ext" "$value" "$out" "$rendered"; then - printf '%s\n' "$out" - return 0 - else - warn " content from ${rendered} did not verify; trying next gateway" - fi - done - - rm -f "$out" - return 1 -} - -# Verify that the fetched bytes at $file correspond to the declared link. -# -# For CID links: trust only fetches from IPFS HTTP gateways, which verify -# server-side (a CID-indexed path the server actually serves is by -# definition a path whose bytes hash to that CID). 
-# -# For hash links: recompute the digest and compare case-insensitively. -verify_bytes() { - local ext="$1" expected="$2" file="$3" source_url="${4:-}" - if [[ "$ext" == "cid" ]]; then - # IPFS HTTP gateways do server-side verification; accept those. - if [[ "$source_url" == *"/ipfs/"* ]]; then - [[ -s "$file" ]] - return - fi - # Non-IPFS origin (e.g. GitHub Pages mirror at .../CID/) — - # we cannot verify locally without risking chunker-drift false - # negatives, so reject. The `ipfs cat` fast path in - # fetch_and_verify is the canonical way to resolve a .cid. - return 1 - fi - - local cmd actual - cmd="$(hash_cmd_for_ext "$ext")" || return 1 - # Word-splitting is intentional — a fallback command like "shasum -a 256" - # expands to multiple argv entries, while the coreutils "md5sum" stays - # as a single argv entry. - # shellcheck disable=SC2086 - actual="$($cmd "$file" | awk '{print $1}')" - [[ "${actual,,}" == "${expected,,}" ]] -} - -# --------------------------------------------------------------------------- -# Enumerate targets -# --------------------------------------------------------------------------- - -if [[ -f "$TARGET" ]]; then - LINKS=("$TARGET") -else - LINKS=() - readarray -t LINKS < <( - find "$TARGET" -type f \( \ - -name "*.cid" \ - -o -name "*.md5" \ - -o -name "*.sha1" \ - -o -name "*.sha224" \ - -o -name "*.sha256" \ - -o -name "*.sha384" \ - -o -name "*.sha512" \ - \) | LC_ALL=C sort - ) -fi - -# Filter by --hash-only / --cid-only. Iterate defensively so `set -u` on an -# empty LINKS array (e.g. directory with no content links) does not error -# out on bash versions before 4.4. -FILTERED=() -if [[ ${#LINKS[@]} -gt 0 ]]; then - for link in "${LINKS[@]}"; do - ext="${link##*.}" - if $HASH_ONLY && [[ "$ext" == "cid" ]]; then continue; fi - if $CID_ONLY && [[ "$ext" != "cid" ]]; then continue; fi - FILTERED+=("$link") - done -fi - -if [[ ${#FILTERED[@]} -eq 0 ]]; then - info "No matching content links under ${TARGET}. Nothing to do." 
- exit 0 -fi - -LINKS=("${FILTERED[@]}") - -info "Processing ${#LINKS[@]} content link(s)..." -$DRY_RUN && info "(--dry-run: no files will be modified)" - -# --------------------------------------------------------------------------- -# Main loop -# --------------------------------------------------------------------------- - -UPLOAD_ARGS=() -if [[ -n "$TESTING_DATA_REPO" ]]; then - UPLOAD_ARGS+=(--testing-data-repo "$TESTING_DATA_REPO") -fi -if $BACKGROUND; then - UPLOAD_ARGS+=(--background) -fi - -FAIL=0 - -for link in "${LINKS[@]}"; do - ext="${link##*.}" - value="$(tr -d '[:space:]' < "$link")" - real_file="${link%.${ext}}" - - if [[ -z "$value" ]]; then - printf 'FAIL %s empty-content-link\n' "$link" >&2 - FAIL=$((FAIL + 1)) - continue - fi - - if $DRY_RUN; then - printf 'WOULD-NORMALIZE %s (%s=%s) -> %s.cid\n' \ - "$link" "$ext" "$value" "$real_file" - continue - fi - - info "Normalizing ${link} (${ext}=${value})" - - if [[ -e "$real_file" ]]; then - die "Refusing to normalize: ${real_file} already exists on disk. Delete or move it first." - fi - - tmp_bytes="" - if ! tmp_bytes="$(fetch_and_verify "$ext" "$value")"; then - printf 'FAIL %s fetch-or-verify-failed\n' "$link" >&2 - FAIL=$((FAIL + 1)) - continue - fi - - # Stage the real file next to the link, then re-upload via ipfs-upload.sh. - mv "$tmp_bytes" "$real_file" - - # Remove the old content link BEFORE running ipfs-upload.sh — the upload - # script rejects inputs that look like content links (defensive guard), - # but we also want a clean working tree if the upload fails. - rm -f "$link" - - if ! "$UPLOAD_SCRIPT" "${UPLOAD_ARGS[@]}" "$real_file"; then - printf 'FAIL %s upload-failed\n' "$link" >&2 - # Best-effort recovery: restore the original link file from its value. - # ipfs-upload.sh writes the .cid file and removes the data file before - # updating the manifest, so a failure in the manifest step can leave a - # .cid orphan alongside the restored original link — clean it up too. 
- printf '%s\n' "$value" > "$link" - rm -f "$real_file" "${real_file}.cid" - FAIL=$((FAIL + 1)) - continue - fi - - printf 'NORMALIZE %s (%s) -> %s.cid\n' "$link" "$ext" "$real_file" -done - -if (( FAIL > 0 )); then - warn "${FAIL} content link(s) failed to normalize." - exit 2 -fi - -info "Done. Review changes and commit as a STYLE: commit (see Documentation/AI/git-commits.md)." diff --git a/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh b/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh deleted file mode 100755 index add07fd3188..00000000000 --- a/Utilities/Maintenance/ExternalDataUpload/ipfs-pin-all.sh +++ /dev/null @@ -1,178 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Batch-pin every CID in Testing/Data/content-links.manifest locally and on -# every configured remote pinning service (itk-filebase, plus itk-pinata or -# any other PSA-compatible remote if registered with `ipfs pin remote -# service add`). -# -# Usage: ipfs-pin-all.sh [--background] -# -# Options: -# --background Queue remote pins asynchronously (faster, no wait). - -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" -MANIFEST="$REPO_ROOT/Testing/Data/content-links.manifest" - -BACKGROUND="" -while [[ $# -gt 0 ]]; do - case "$1" in - --background) - BACKGROUND="--background" - shift - ;; - -h|--help) - sed -n '4,11p' "${BASH_SOURCE[0]}" | sed 's/^# \{0,1\}//' - exit 0 - ;; - *) - echo "ERROR: Unknown argument: $1" >&2 - echo "Usage: $(basename "${BASH_SOURCE[0]}") [--background]" >&2 - exit 2 - ;; - esac -done - -# --------------------------------------------------------------------------- -# Validate manifest -# --------------------------------------------------------------------------- - -if [[ ! 
-f "$MANIFEST" ]]; then - echo "ERROR: Manifest not found: $MANIFEST" >&2 - exit 1 -fi - -ENTRY_COUNT="$(grep -Evc '^(#|$)' "$MANIFEST" || true)" -if [[ "$ENTRY_COUNT" -eq 0 ]]; then - echo "Manifest is empty — nothing to pin." - exit 0 -fi - -# --------------------------------------------------------------------------- -# Prerequisites -# --------------------------------------------------------------------------- - -if ! command -v ipfs &>/dev/null; then - echo "ERROR: 'ipfs' command not found on PATH." >&2 - echo " Install Kubo: https://docs.ipfs.tech/install/command-line/" >&2 - exit 1 -fi - -if ! ipfs swarm peers &>/dev/null; then - echo "ERROR: IPFS daemon does not appear to be running." >&2 - echo " Start with: ipfs daemon" >&2 - exit 1 -fi - -# Discover configured remote services (none required for batch pinning). -CONFIGURED_SERVICES="$(ipfs pin remote service ls 2>/dev/null || true)" -SERVICES=() -while IFS= read -r line; do - svc="$(echo "$line" | awk '{print $1}')" - if [[ -n "$svc" ]]; then - SERVICES+=("$svc") - fi -done <<< "$CONFIGURED_SERVICES" - -if [[ ${#SERVICES[@]} -eq 0 ]]; then - echo "WARNING: No remote pinning services configured." >&2 - echo " Only local pinning will be performed." >&2 -fi - -# --------------------------------------------------------------------------- -# Pin each CID -# --------------------------------------------------------------------------- - -TOTAL=0 -LOCAL_FAILED=0 -LOCAL_FAILED_ENTRIES=() -REMOTE_FAILED=0 -REMOTE_FAILED_ENTRIES=() - -echo "==> Pinning ${ENTRY_COUNT} CIDs from manifest..." -if [[ -n "$BACKGROUND" ]]; then - echo " (remote pins queued in background)" -fi -echo "" - -while IFS= read -r line; do - # Skip comments and empty lines. - [[ "$line" =~ ^# ]] && continue - [[ -z "$line" ]] && continue - - CID="$(echo "$line" | awk '{print $1}')" - FILEPATH="$(echo "$line" | awk '{print $2}')" - - # Skip malformed lines (missing CID or filepath). 
- if [[ -z "$CID" || -z "$FILEPATH" ]]; then - echo "WARNING: Skipping malformed manifest line: $line" >&2 - continue - fi - - PIN_NAME="$(basename "$FILEPATH")" - TOTAL=$((TOTAL + 1)) - - echo "==> [${TOTAL}/${ENTRY_COUNT}] ${FILEPATH}" - echo " CID: ${CID}" - - # Local pin. - if ! ipfs pin add "$CID" >/dev/null 2>&1; then - echo " FAILED: local pin" >&2 - LOCAL_FAILED=$((LOCAL_FAILED + 1)) - LOCAL_FAILED_ENTRIES+=("$FILEPATH") - continue - fi - echo " OK: local" - - # Remote pins. - for svc in "${SERVICES[@]}"; do - # Skip services where this CID is already queued/pinning/pinned — - # Pinata rejects duplicate `pin remote add` calls with - # DUPLICATE_OBJECT (400). Same guard as ipfs-upload.sh. - if ipfs pin remote ls --service="$svc" --cid="$CID" \ - --status=queued,pinning,pinned 2>/dev/null | grep -q .; then - echo " OK: ${svc} (already pinned)" - continue - fi - - if ipfs pin remote add --service="$svc" --name="$PIN_NAME" $BACKGROUND "$CID" >/dev/null 2>&1; then - echo " OK: ${svc}" - else - echo " FAILED: ${svc}" >&2 - REMOTE_FAILED=$((REMOTE_FAILED + 1)) - if ! printf '%s\n' "${REMOTE_FAILED_ENTRIES[@]+"${REMOTE_FAILED_ENTRIES[@]}"}" | grep -qxF "$FILEPATH"; then - REMOTE_FAILED_ENTRIES+=("$FILEPATH") - fi - fi - done -done < "$MANIFEST" - -# --------------------------------------------------------------------------- -# Summary -# --------------------------------------------------------------------------- - -echo "" -echo "==> Batch pin complete: ${TOTAL} CIDs processed." 
- -EXIT_CODE=0 - -if [[ $LOCAL_FAILED -gt 0 ]]; then - echo "" >&2 - echo "ERROR: ${LOCAL_FAILED} CID(s) failed local pinning:" >&2 - for entry in "${LOCAL_FAILED_ENTRIES[@]}"; do - echo " - ${entry}" >&2 - done - EXIT_CODE=1 -fi - -if [[ $REMOTE_FAILED -gt 0 ]]; then - echo "" >&2 - echo "WARNING: ${REMOTE_FAILED} remote pin submission(s) failed:" >&2 - for entry in "${REMOTE_FAILED_ENTRIES[@]}"; do - echo " - ${entry}" >&2 - done - EXIT_CODE=1 -fi - -exit $EXIT_CODE diff --git a/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh b/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh deleted file mode 100755 index fb1cb27ba2c..00000000000 --- a/Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh +++ /dev/null @@ -1,359 +0,0 @@ -#!/usr/bin/env bash -set -euo pipefail - -# Upload a file to IPFS (CIDv1, UnixFS v1 2025 profile), pin it on the -# itk-filebase remote pinning service (and on itk-pinata if it is -# configured — Pinata is optional because pin-by-CID is a paid-plan -# feature there), and replace the original with a .cid content link. -# Optionally mirror the bytes into a local ITKTestingData checkout at -# CID/. -# -# Usage: -# ipfs-upload.sh [--testing-data-repo ] [--background] -# -# Options: -# --testing-data-repo Path to a local clone of -# https://github.com/InsightSoftwareConsortium/ITKTestingData -# The uploaded bytes are copied to -# /CID/ and `git add`ed there. -# Skipped with a warning for files > 50 MB, -# which GitHub rejects. -# --background Submit remote pin requests asynchronously -# (pins queue at itk-pinata / itk-filebase and -# the script returns without waiting). Useful -# for batch workflows. Default is synchronous, -# which blocks until each remote reports -# 'pinned' — safer for one-off uploads because -# failures surface immediately, but can take -# minutes per file as the remote fetches the -# content. 
-# -# Prerequisites: -# - Kubo (go-ipfs) installed and `ipfs` on PATH -# - IPFS daemon running (ipfs daemon, or IPFS Desktop) -# - UnixFS v1 2025 profile applied: `ipfs config profile apply unixfs-v1-2025` -# - `itk-filebase` remote pinning service configured (required) -# - `itk-pinata` remote pinning service configured (optional — Pinata's -# pin-by-CID endpoint is paid-only, so configure it only if you have -# a paid plan) -# -# See README.md in this directory for full setup. - -SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)" -REPO_ROOT="$(cd "$SCRIPT_DIR/../../.." && pwd)" - -# Required pinning service — Filebase's free tier supports pin-by-CID -# via the IPFS Pinning Service API, so we always need this one. -REQUIRED_SERVICES=(itk-filebase) - -# Optional pinning service — Pinata's pin-by-CID endpoint requires a paid -# plan (their free tier rejects PSA `pin remote add` with -# PAID_FEATURE_ONLY/403). Pinned to if configured, skipped with a notice -# if not. -OPTIONAL_SERVICES=(itk-pinata) - -# GitHub hard-rejects pushes containing any file > 50 MB. The ITKTestingData -# mirror step is skipped for files over this limit. 
-GITHUB_FILE_LIMIT_BYTES=$((50 * 1024 * 1024)) - -# --------------------------------------------------------------------------- -# Argument parsing -# --------------------------------------------------------------------------- - -TESTING_DATA_REPO="" -BACKGROUND="" -FILE="" - -while [[ $# -gt 0 ]]; do - case "$1" in - --testing-data-repo) - TESTING_DATA_REPO="${2:?--testing-data-repo requires a path}" - shift 2 - ;; - --testing-data-repo=*) - TESTING_DATA_REPO="${1#--testing-data-repo=}" - shift - ;; - --background) - BACKGROUND="--background" - shift - ;; - -h|--help) - sed -n '3,/^$/{ s/^# \?//; p }' "$0" - exit 0 - ;; - -*) - echo "ERROR: Unknown option: $1" >&2 - exit 1 - ;; - *) - if [[ -n "$FILE" ]]; then - echo "ERROR: Unexpected positional argument: $1" >&2 - exit 1 - fi - FILE="$1" - shift - ;; - esac -done - -if [[ -z "$FILE" ]]; then - echo "Usage: $0 [--testing-data-repo ] " >&2 - exit 1 -fi - -if [[ ! -f "$FILE" ]]; then - echo "ERROR: File not found: $FILE" >&2 - exit 1 -fi - -# Guard: reject symlinks (realpath would resolve to the target, and rm would -# delete the target file rather than the symlink itself). -if [[ -L "$FILE" ]]; then - echo "ERROR: Symlink paths are not supported: $FILE" >&2 - echo " Pass the real file path instead." >&2 - exit 1 -fi - -ABSOLUTE_FILE="$(realpath "$FILE")" - -# Guard: file must be inside the repository. -if [[ "$ABSOLUTE_FILE" != "$REPO_ROOT"/* ]]; then - echo "ERROR: File must be inside the repository: $ABSOLUTE_FILE" >&2 - exit 1 -fi - -# Guard: reject files that are already content links. -for ext in cid md5 sha1 sha224 sha256 sha384 sha512; do - if [[ "$FILE" == *."${ext}" ]]; then - echo "ERROR: File is already a .${ext} content link: $FILE" >&2 - exit 1 - fi -done - -REL_FILE="${ABSOLUTE_FILE#"$REPO_ROOT/"}" - -# Guard: reject paths with whitespace (manifest format uses space as delimiter). 
-if [[ "$REL_FILE" =~ [[:space:]] ]]; then - echo "ERROR: Filepath contains whitespace, which is not supported: $REL_FILE" >&2 - echo " Rename the file to remove spaces before uploading." >&2 - exit 1 -fi - -PIN_NAME="$(basename "$ABSOLUTE_FILE")" - -# --------------------------------------------------------------------------- -# Validate --testing-data-repo path (before any IPFS work) -# --------------------------------------------------------------------------- - -if [[ -n "$TESTING_DATA_REPO" ]]; then - if [[ ! -d "$TESTING_DATA_REPO" ]]; then - echo "ERROR: --testing-data-repo path is not a directory: $TESTING_DATA_REPO" >&2 - exit 1 - fi - if [[ ! -d "$TESTING_DATA_REPO/.git" ]]; then - echo "ERROR: --testing-data-repo is not a git checkout: $TESTING_DATA_REPO" >&2 - exit 1 - fi - TESTING_DATA_REPO="$(realpath "$TESTING_DATA_REPO")" -fi - -# --------------------------------------------------------------------------- -# Prerequisites -# --------------------------------------------------------------------------- - -if ! command -v ipfs &>/dev/null; then - echo "ERROR: 'ipfs' command not found on PATH." >&2 - echo " Install Kubo: https://docs.ipfs.tech/install/command-line/" >&2 - echo " See: Utilities/Maintenance/ExternalDataUpload/README.md" >&2 - exit 1 -fi - -if ! ipfs swarm peers &>/dev/null; then - echo "ERROR: IPFS daemon does not appear to be running." >&2 - echo " Start with: ipfs daemon" >&2 - echo " Or launch IPFS Desktop." >&2 - exit 1 -fi - -# Check required remote pinning services are configured. Optional services -# are recorded so they're attempted in addition to the required ones; a -# missing optional service is reported but does not abort the upload. -CONFIGURED_SERVICES="$(ipfs pin remote service ls 2>/dev/null || true)" -for svc in "${REQUIRED_SERVICES[@]}"; do - if ! echo "$CONFIGURED_SERVICES" | grep -q "^${svc} "; then - echo "ERROR: Required pinning service '${svc}' is not configured." 
>&2 - echo " See: Utilities/Maintenance/ExternalDataUpload/README.md" >&2 - exit 1 - fi -done - -ACTIVE_SERVICES=("${REQUIRED_SERVICES[@]}") -for svc in "${OPTIONAL_SERVICES[@]}"; do - if echo "$CONFIGURED_SERVICES" | grep -q "^${svc} "; then - ACTIVE_SERVICES+=("$svc") - else - echo "==> Optional pinning service '${svc}' is not configured; skipping." - fi -done - -# --------------------------------------------------------------------------- -# Add to IPFS -# --------------------------------------------------------------------------- - -echo "==> Adding ${PIN_NAME} to IPFS (CIDv1, UnixFS v1 2025 profile)..." -CID="$(ipfs add --cid-version=1 --quieter "$ABSOLUTE_FILE")" - -if [[ -z "$CID" ]]; then - echo "ERROR: ipfs add returned an empty CID." >&2 - exit 1 -fi - -echo " CID: ${CID}" - -# --------------------------------------------------------------------------- -# Pin locally (ipfs add already pins, but be explicit) -# --------------------------------------------------------------------------- - -echo "==> Pinning locally..." -ipfs pin add "$CID" >/dev/null - -# --------------------------------------------------------------------------- -# Pin on remote services -# --------------------------------------------------------------------------- - -FAILED_PINS=() - -for svc in "${ACTIVE_SERVICES[@]}"; do - # Skip services where this CID is already queued/pinning/pinned — - # Pinata rejects duplicate `pin remote add` calls with - # DUPLICATE_OBJECT (400), and resubmitting on Filebase just makes a - # second queue entry. - if ipfs pin remote ls --service="$svc" --cid="$CID" \ - --status=queued,pinning,pinned 2>/dev/null | grep -q .; then - echo "==> Already pinned (or in flight) on ${svc}; skipping" - continue - fi - - if [[ -n "$BACKGROUND" ]]; then - echo "==> Queueing pin on ${svc} (background)..." - else - echo "==> Pinning on ${svc}..." 
- fi - if ipfs pin remote add --service="$svc" --name="$PIN_NAME" $BACKGROUND "$CID" 2>&1; then - echo " OK: ${svc}" - else - echo " FAILED: ${svc}" >&2 - FAILED_PINS+=("$svc") - fi -done - -if [[ ${#FAILED_PINS[@]} -gt 0 ]]; then - echo "" >&2 - echo "ERROR: Remote pin submission failed for: ${FAILED_PINS[*]}" >&2 - echo " The original file has NOT been modified." >&2 - echo " Fix the issue and retry, or pin manually:" >&2 - for failed_svc in "${FAILED_PINS[@]}"; do - echo " ipfs pin remote add --service=${failed_svc} --name=\"${PIN_NAME}\" ${BACKGROUND} ${CID}" >&2 - done - exit 1 -fi - -# --------------------------------------------------------------------------- -# Mirror into ITKTestingData (optional, size-gated) -# --------------------------------------------------------------------------- - -FILE_SIZE_BYTES="$(stat -c '%s' "$ABSOLUTE_FILE" 2>/dev/null || stat -f '%z' "$ABSOLUTE_FILE")" - -if [[ -n "$TESTING_DATA_REPO" ]]; then - if (( FILE_SIZE_BYTES > GITHUB_FILE_LIMIT_BYTES )); then - echo "" >&2 - echo "WARNING: ${PIN_NAME} is ${FILE_SIZE_BYTES} bytes (> 50 MB)." >&2 - echo " GitHub rejects pushes containing files > 50 MB, so it" >&2 - echo " will NOT be mirrored to ITKTestingData." >&2 - echo " IPFS pin (local + ${ACTIVE_SERVICES[*]}) succeeded;" >&2 - echo " the .cid content link will still be produced." >&2 - else - MIRROR_DIR="$TESTING_DATA_REPO/CID" - MIRROR_PATH="$MIRROR_DIR/$CID" - mkdir -p "$MIRROR_DIR" - echo "==> Mirroring to ITKTestingData: CID/${CID}" - cp "$ABSOLUTE_FILE" "$MIRROR_PATH" - if ! 
git -C "$TESTING_DATA_REPO" add "CID/$CID"; then - echo "ERROR: Failed to 'git add CID/$CID' in $TESTING_DATA_REPO" >&2 - rm -f "$MIRROR_PATH" - exit 1 - fi - fi -fi - -# --------------------------------------------------------------------------- -# Replace original file with .cid content link -# (only reached after all required remote pins succeeded) -# -# Ordering hazard: the .cid file is written and the original data file is -# removed BEFORE the manifest update below. If the process is killed or hits -# a disk-full error between here and the `mv` of "${MANIFEST}.tmp", the -# original is gone, the .cid link exists, but the manifest is not updated. -# content-link-normalize.sh's recovery block restores the original link and -# also removes any orphan .cid in that case. A standalone `ipfs-upload.sh` -# crash here leaves the working tree consistent (CID file present, original -# absent) but the manifest stale; the user can re-run after repairing. -# --------------------------------------------------------------------------- - -CID_FILE="${ABSOLUTE_FILE}.cid" -REL_CID="${CID_FILE#"$REPO_ROOT/"}" -printf '%s\n' "$CID" > "$CID_FILE" -rm "$ABSOLUTE_FILE" - -# --------------------------------------------------------------------------- -# Update content link manifest -# --------------------------------------------------------------------------- - -MANIFEST="$REPO_ROOT/Testing/Data/content-links.manifest" - -if [[ -f "$MANIFEST" ]]; then - # Remove existing entry for this filepath (re-upload case). - # Use awk for exact string match (grep would treat dots as wildcards). - awk -v path="$REL_FILE" '$2 != path' "$MANIFEST" > "${MANIFEST}.tmp" - mv "${MANIFEST}.tmp" "$MANIFEST" -else - # Seed a fresh manifest with a brief header. - cat > "$MANIFEST" <<'EOF' -# ITK content-link manifest -# One CID per line, format: -# Maintained by Utilities/Maintenance/ExternalDataUpload/ipfs-upload.sh -EOF -fi - -# Append the new entry. 
-printf '%s %s\n' "$CID" "$REL_FILE" >> "$MANIFEST" - -# Sort data lines by filepath; preserve comment header at top. -{ - grep '^#' "$MANIFEST" || true - grep -v '^#' "$MANIFEST" | grep -v '^$' | LC_ALL=C sort -k2 -} > "${MANIFEST}.tmp" -mv "${MANIFEST}.tmp" "$MANIFEST" - -echo "" -echo "==> Upload complete." -echo " CID: ${CID}" -echo " Link: ${CID_FILE}" - -# --------------------------------------------------------------------------- -# Suggest git commands -# --------------------------------------------------------------------------- - -echo "" -echo "Next steps (ITK repository):" -echo " git rm \"${REL_FILE}\"" -echo " git add \"${REL_CID}\"" -echo " git add Testing/Data/content-links.manifest" - -if [[ -n "$TESTING_DATA_REPO" && $FILE_SIZE_BYTES -le $GITHUB_FILE_LIMIT_BYTES ]]; then - echo "" - echo "Next steps (ITKTestingData repository at ${TESTING_DATA_REPO}):" - echo " git -C \"${TESTING_DATA_REPO}\" commit -m \"Add ${PIN_NAME} (${CID})\"" - echo " git -C \"${TESTING_DATA_REPO}\" push" -fi diff --git a/Utilities/Maintenance/ExternalDataUpload/normalize.py b/Utilities/Maintenance/ExternalDataUpload/normalize.py new file mode 100755 index 00000000000..6590b7a6672 --- /dev/null +++ b/Utilities/Maintenance/ExternalDataUpload/normalize.py @@ -0,0 +1,319 @@ +#!/usr/bin/env python3 +"""Normalize ITK content links: convert ``.md5`` / ``.shaNNN`` links to ``.cid``. + +For each content link found, the script: + +1. Fetches bytes via the gateway templates declared in + ``CMake/ITKExternalData.cmake`` (identical order to the build). +2. Verifies the bytes against the declared hash or CID. +3. Re-materializes the file alongside the link, then runs the Filebase + uploader from ``upload.py`` so a fresh CID is produced under the + ``unixfs-v1-2025`` profile and pinned on Filebase. The old + ``.md5`` / ``.shaNNN`` link is removed; a ``.cid`` link is written + in its place. 
# Map content-link extension -> the token substituted for %(algo) in the
# gateway URL templates. IPFS gateways use the lowercase "cid" literal;
# hash-based mirrors use the uppercase algorithm name.
ALGO_UC = {
    "md5": "MD5",
    "sha1": "SHA1",
    "sha224": "SHA224",
    "sha256": "SHA256",
    "sha384": "SHA384",
    "sha512": "SHA512",
    "cid": "cid",
}


def parse_url_templates(cmake_file: Path) -> list[str]:
    """Extract URL templates from the ``ExternalData_URL_TEMPLATES`` list().

    Locates the ``list(APPEND ExternalData_URL_TEMPLATES ...)`` invocation,
    walks its argument list with a paren-aware scanner (templates contain
    ``%(hash)`` / ``%(algo)``, so a naive ``.*?`` regex would close the match
    prematurely on those inner parens), and returns every quoted argument
    that contains ``%(hash)`` in declaration order.

    Exits with an error message when the list() cannot be found or is
    unterminated.
    """
    text = cmake_file.read_text()
    anchor = re.search(
        r"list\s*\(\s*APPEND\s+ExternalData_URL_TEMPLATES\s",
        text,
    )
    if anchor is None:
        sys.exit(
            f"ERROR: failed to find ExternalData_URL_TEMPLATES list() in "
            f"{cmake_file}"
        )

    # Scan forward from the anchor, tracking paren depth; parens inside
    # quoted strings (e.g. "%(hash)") must not affect the depth.
    depth = 1
    i = anchor.end()
    in_string = False
    end_idx: int | None = None
    while i < len(text):
        ch = text[i]
        if in_string:
            if ch == "\\":
                i += 2  # skip the escaped character
                continue
            if ch == '"':
                in_string = False
        else:
            if ch == '"':
                in_string = True
            elif ch == "(":
                depth += 1
            elif ch == ")":
                depth -= 1
                if depth == 0:
                    end_idx = i
                    break
        i += 1

    if end_idx is None:
        sys.exit(
            f"ERROR: unterminated ExternalData_URL_TEMPLATES list() in "
            f"{cmake_file}"
        )

    body = text[anchor.end() : end_idx]
    return [t for t in re.findall(r'"([^"]+)"', body) if "%(hash)" in t]


def render_url(template: str, algo: str, value: str) -> str:
    """Substitute ``%(algo)`` and ``%(hash)`` placeholders in *template*."""
    return template.replace("%(algo)", algo).replace("%(hash)", value)


def hash_bytes(ext: str, data: bytes) -> str:
    """Return the hex digest of *data* under the algorithm named by *ext*."""
    return hashlib.new(ext, data).hexdigest()


def fetch_and_verify(ext: str, value: str, templates: list[str]) -> Path:
    """Download bytes from the first gateway whose response verifies; return tempfile path.

    For ``.cid`` links, accept any successful HTTP fetch from a path containing
    ``/ipfs/`` because IPFS HTTP gateways verify CIDs server-side. For hash
    links, recompute the digest locally.

    Raises RuntimeError when no gateway yields verified content.
    """
    import requests  # imported lazily so --help works without the env active

    algo_uc = ALGO_UC.get(ext)
    if algo_uc is None:
        raise RuntimeError(f"Unknown content-link extension: .{ext}")

    last_error: Exception | None = None
    for template in templates:
        rendered = render_url(template, algo_uc, value)
        is_ipfs_origin = "/ipfs/" in urllib.parse.urlparse(rendered).path
        # Hash links cannot be served by IPFS gateways (%(hash) is not a CID
        # there); CID links are only trusted from IPFS gateways, which verify
        # content server-side. FIX: skip mismatched origins *before* issuing
        # the request -- the previous code downloaded the full body from
        # non-IPFS origins for .cid links and then discarded it.
        if ext == "cid":
            if not is_ipfs_origin:
                continue
        elif is_ipfs_origin:
            continue

        try:
            response = requests.get(rendered, timeout=(10, 120))
            response.raise_for_status()
        except requests.RequestException as exc:
            last_error = exc
            continue

        body = response.content
        if not body:
            continue

        if ext != "cid":
            actual = hash_bytes(ext, body)
            if actual.lower() != value.lower():
                print(
                    f"WARN: content from {rendered} did not verify; "
                    "trying next gateway",
                    file=sys.stderr,
                )
                continue

        # FIX: tempfile.mkstemp() returns an *open* fd that the previous code
        # discarded, leaking one descriptor per fetch. NamedTemporaryFile
        # closes the handle when the with-block exits.
        with tempfile.NamedTemporaryFile(
            prefix="itk-content-link.", delete=False
        ) as tmp:
            tmp.write(body)
            return Path(tmp.name)

    raise RuntimeError(
        f"Failed to fetch and verify {ext}={value} from any gateway"
        + (f" (last error: {last_error})" if last_error else "")
    )


def enumerate_links(target: Path, hash_only: bool, cid_only: bool) -> list[Path]:
    """Collect content-link files under *target*, honoring mode filters.

    NOTE(review): a single-file *target* bypasses the --hash-only/--cid-only
    filters; this looks intentional (the user named the file explicitly) --
    confirm before changing.
    """
    if target.is_file():
        return [target]
    exts = {f".{e}" for e in upload_module.CONTENT_LINK_EXTS}
    found = sorted(p for p in target.rglob("*") if p.is_file() and p.suffix in exts)
    filtered: list[Path] = []
    for link in found:
        ext = link.suffix.lstrip(".")
        if hash_only and ext == "cid":
            continue
        if cid_only and ext != "cid":
            continue
        filtered.append(link)
    return filtered
"regenerate existing .cid under the unixfs-v1-2025 profile." + ), + ) + parser.add_argument("target", help="Path or directory to process") + parser.add_argument( + "--testing-data-repo", + metavar="PATH", + help="Forwarded to upload.py; mirror bytes into a local ITKTestingData clone.", + ) + parser.add_argument( + "--bucket", + help="Filebase IPFS bucket (default: $FILEBASE_BUCKET).", + ) + parser.add_argument( + "--dry-run", + action="store_true", + help="List what would change, modify nothing.", + ) + mode = parser.add_mutually_exclusive_group() + mode.add_argument( + "--hash-only", + action="store_true", + help="Process only .md5 / .shaNNN links; leave .cid alone.", + ) + mode.add_argument( + "--cid-only", + action="store_true", + help="Process only .cid links (re-hash under unixfs-v1-2025).", + ) + args = parser.parse_args(argv) + + target = Path(args.target) + if not target.exists(): + sys.exit(f"ERROR: not found: {args.target}") + if not CMAKE_FILE.exists(): + sys.exit(f"ERROR: cannot find {CMAKE_FILE}") + + upload_module._check_node_available() + access_key, secret_key, bucket = upload_module._credentials(args) + + testing_data_repo: Path | None = None + if args.testing_data_repo: + testing_data_repo = Path(args.testing_data_repo).resolve() + if not (testing_data_repo / ".git").exists(): + sys.exit( + f"ERROR: --testing-data-repo is not a git checkout: " + f"{args.testing_data_repo}" + ) + + templates = parse_url_templates(CMAKE_FILE) + print(f"==> Loaded {len(templates)} gateway template(s) from {CMAKE_FILE}") + + links = enumerate_links(target, args.hash_only, args.cid_only) + if not links: + print(f"No matching content links under {target}. 
Nothing to do.") + return 0 + print(f"==> Processing {len(links)} content link(s)...") + if args.dry_run: + print("(--dry-run: no files will be modified)") + + fail = 0 + for link in links: + ext = link.suffix.lstrip(".") + value = link.read_text().strip() + real_file = link.with_suffix("") + if not value: + print(f"FAIL {link} empty-content-link", file=sys.stderr) + fail += 1 + continue + if ext == "cid" and not CIDV1_RE.match(value): + print(f"FAIL {link} invalid-cid", file=sys.stderr) + fail += 1 + continue + + if args.dry_run: + print( + f"WOULD-NORMALIZE {link} ({ext}={value}) -> {real_file}.cid" + ) + continue + + print(f"==> Normalizing {link} ({ext}={value})") + + if real_file.exists(): + sys.exit( + f"ERROR: refusing to normalize: {real_file} already exists. " + "Delete or move it first." + ) + + try: + tmp_bytes = fetch_and_verify(ext, value, templates) + except RuntimeError as exc: + print(f"FAIL {link} {exc}", file=sys.stderr) + fail += 1 + continue + + tmp_bytes.rename(real_file) + link.unlink() + + try: + cid = upload_file_to_filebase(real_file, bucket, access_key, secret_key) + except (subprocess.CalledProcessError, RuntimeError) as exc: + print(f"FAIL {link} upload-failed: {exc}", file=sys.stderr) + link.write_text(value + "\n") + real_file.unlink(missing_ok=True) + fail += 1 + continue + + if testing_data_repo is not None: + mirror_to_testing_data(real_file, cid, testing_data_repo) + + cid_path = real_file.with_name(real_file.name + ".cid") + cid_path.write_text(cid + "\n") + real_file.unlink() + rel_path = real_file.relative_to(REPO_ROOT).as_posix() + update_manifest(cid, rel_path) + print(f"NORMALIZE {link} ({ext}) -> {cid_path}") + + if fail: + print(f"WARN: {fail} content link(s) failed to normalize.", file=sys.stderr) + return 2 + + print( + "Done. Review changes and commit as a STYLE: commit " + "(see Documentation/AI/git-commits.md)." 
+ ) + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/Utilities/Maintenance/ExternalDataUpload/upload.py b/Utilities/Maintenance/ExternalDataUpload/upload.py new file mode 100755 index 00000000000..b6716035407 --- /dev/null +++ b/Utilities/Maintenance/ExternalDataUpload/upload.py @@ -0,0 +1,341 @@ +#!/usr/bin/env python3 +"""Upload a file to Filebase IPFS storage and replace it with a .cid content link. + +Builds a CARv1 of the input via ``npx ipfs-car pack`` (defaults: 1 MiB chunks, +1024 children per node, raw leaves, CIDv1 — matches the ``unixfs-v1-2025`` / +IPIP-0499 profile so CIDs are reproducible across implementations) and uploads +the CAR to a Filebase IPFS bucket through its S3-compatible REST API with the +``import: car`` user metadata header. Filebase imports the CAR server-side and +returns the resulting CID in object metadata, which is read back via +``head_object`` and compared against the locally computed root CID. + +The ``.cid`` content link, the manifest entry in +``Testing/Data/content-links.manifest``, and (optionally) a mirror of the bytes +in a local ITKTestingData clone are all produced in the same invocation. +""" + +from __future__ import annotations + +import argparse +import os +import re +import shutil +import subprocess +import sys +import tempfile +from pathlib import Path + +REPO_ROOT = Path(__file__).resolve().parents[3] +MANIFEST = REPO_ROOT / "Testing" / "Data" / "content-links.manifest" +GITHUB_FILE_LIMIT_BYTES = 50 * 1024 * 1024 +FILEBASE_ENDPOINT = "https://s3.filebase.com" + +CONTENT_LINK_EXTS = ("cid", "md5", "sha1", "sha224", "sha256", "sha384", "sha512") +CIDV1_RE = re.compile(r"^baf[a-z0-9]{50,}$") + + +def build_car(input_path: Path, output_car: Path) -> str: + """Pack ``input_path`` into a CARv1 at ``output_car`` and return its root CID. + + Uses ``npx ipfs-car pack`` with ``--no-wrap`` (single-file upload, no + wrapping directory). 
ipfs-car v1+ defaults match the unixfs-v1-2025 profile + (1 MiB chunks, 1024 links/node, raw leaves, CIDv1), so no extra flags are + needed to produce a reproducible CID. + """ + result = subprocess.run( + [ + "npx", + "--yes", + "ipfs-car", + "pack", + str(input_path), + "--no-wrap", + "--output", + str(output_car), + ], + capture_output=True, + text=True, + check=True, + ) + for line in reversed(result.stdout.splitlines()): + token = line.strip().split()[-1] if line.strip() else "" + if CIDV1_RE.match(token): + return token + raise RuntimeError( + f"Could not parse CID from `npx ipfs-car pack` output:\n{result.stdout}" + ) + + +def upload_car_to_filebase( + car_path: Path, + bucket: str, + object_key: str, + access_key: str, + secret_key: str, +) -> str: + """Upload a CAR to a Filebase IPFS bucket and return the CID Filebase reports. + + Setting ``Metadata={"import": "car"}`` tells Filebase to import the CAR + server-side; the imported root CID is then exposed via + ``head_object()['Metadata']['cid']``. ``put_object`` is used directly + rather than ``upload_file`` because the latter's multipart code path can + strip user metadata on small payloads. 
+ """ + import boto3 # imported lazily so --help works without the env active + + s3 = boto3.client( + "s3", + endpoint_url=FILEBASE_ENDPOINT, + aws_access_key_id=access_key, + aws_secret_access_key=secret_key, + region_name="us-east-1", + ) + with car_path.open("rb") as f: + s3.put_object( + Bucket=bucket, + Key=object_key, + Body=f, + Metadata={"import": "car"}, + ) + head = s3.head_object(Bucket=bucket, Key=object_key) + return head.get("Metadata", {}).get("cid", "") + + +def upload_file_to_filebase( + input_path: Path, + bucket: str, + access_key: str, + secret_key: str, +) -> str: + """End-to-end: build CAR for ``input_path``, upload it, verify, return CID.""" + object_key = input_path.name + ".car" + with tempfile.NamedTemporaryFile(suffix=".car", delete=False) as tmp: + car_path = Path(tmp.name) + try: + local_cid = build_car(input_path, car_path) + remote_cid = upload_car_to_filebase( + car_path, bucket, object_key, access_key, secret_key + ) + if not remote_cid: + raise RuntimeError( + f"Filebase did not return a CID for {object_key}. The CAR may " + "not have been recognised — check the bucket is an IPFS bucket " + "and the access key has write permission." + ) + if local_cid != remote_cid: + raise RuntimeError( + f"CID mismatch: local={local_cid}, filebase={remote_cid}. " + "This indicates a chunker/profile drift between this client " + "and Filebase — file an issue." 
+ ) + return local_cid + finally: + car_path.unlink(missing_ok=True) + + +def update_manifest(cid: str, rel_path: str) -> None: + """Insert/replace ``cid `` in Testing/Data/content-links.manifest.""" + MANIFEST.parent.mkdir(parents=True, exist_ok=True) + + header_lines: list[str] = [] + data_lines: list[str] = [] + if MANIFEST.exists(): + for line in MANIFEST.read_text().splitlines(): + if line.startswith("#"): + header_lines.append(line) + elif line.strip(): + fields = line.split() + if len(fields) >= 2 and fields[1] == rel_path: + continue + data_lines.append(line) + else: + header_lines = [ + "# ITK content-link manifest", + "# One CID per line, format: ", + "# Maintained by Utilities/Maintenance/ExternalDataUpload/upload.py", + ] + + data_lines.append(f"{cid} {rel_path}") + data_lines.sort(key=lambda s: s.split()[1]) + + MANIFEST.write_text("\n".join(header_lines + data_lines) + "\n") + + +def mirror_to_testing_data( + file_path: Path, cid: str, testing_data_repo: Path +) -> bool: + """Copy ``file_path`` to ``/CID/`` and ``git add`` it. + + Returns False (with a warning) for files over GitHub's 50 MB push limit; + True after a successful copy + stage. + """ + size = file_path.stat().st_size + if size > GITHUB_FILE_LIMIT_BYTES: + print( + f"WARNING: {file_path.name} is {size} bytes (> 50 MB). GitHub " + "rejects pushes containing files > 50 MB, so it will NOT be " + "mirrored to ITKTestingData. 
The Filebase upload still succeeded; " + "the .cid content link will still be produced.", + file=sys.stderr, + ) + return False + + mirror_dir = testing_data_repo / "CID" + mirror_dir.mkdir(parents=True, exist_ok=True) + mirror_path = mirror_dir / cid + print(f"==> Mirroring to ITKTestingData: CID/{cid}") + shutil.copy2(file_path, mirror_path) + try: + subprocess.run( + ["git", "-C", str(testing_data_repo), "add", f"CID/{cid}"], + check=True, + ) + except subprocess.CalledProcessError: + mirror_path.unlink(missing_ok=True) + raise + return True + + +def _validate_input(file_arg: str) -> Path: + file_path = Path(file_arg) + if not file_path.exists(): + sys.exit(f"ERROR: File not found: {file_arg}") + if file_path.is_symlink(): + sys.exit( + f"ERROR: Symlink paths are not supported: {file_arg}\n" + " Pass the real file path instead." + ) + if not file_path.is_file(): + sys.exit(f"ERROR: Not a regular file: {file_arg}") + + abs_path = file_path.resolve() + try: + abs_path.relative_to(REPO_ROOT) + except ValueError: + sys.exit(f"ERROR: File must be inside the repository: {abs_path}") + + suffix = abs_path.suffix.lstrip(".") + if suffix in CONTENT_LINK_EXTS: + sys.exit(f"ERROR: File is already a .{suffix} content link: {file_arg}") + + rel = abs_path.relative_to(REPO_ROOT).as_posix() + if any(c.isspace() for c in rel): + sys.exit( + f"ERROR: Filepath contains whitespace, which is not supported: {rel}\n" + " Rename the file to remove spaces before uploading." 
+ ) + + return abs_path + + +def _credentials(args: argparse.Namespace) -> tuple[str, str, str]: + access_key = os.environ.get("FILEBASE_ACCESS_KEY", "") + secret_key = os.environ.get("FILEBASE_SECRET_KEY", "") + bucket = args.bucket or os.environ.get("FILEBASE_BUCKET", "") + missing = [ + name + for name, value in [ + ("FILEBASE_ACCESS_KEY", access_key), + ("FILEBASE_SECRET_KEY", secret_key), + ("FILEBASE_BUCKET (or --bucket)", bucket), + ] + if not value + ] + if missing: + sys.exit( + "ERROR: Missing Filebase credentials: " + ", ".join(missing) + "\n" + " See: Utilities/Maintenance/ExternalDataUpload/README.md" + ) + return access_key, secret_key, bucket + + +def _check_node_available() -> None: + if shutil.which("npx") is None: + sys.exit( + "ERROR: 'npx' not found on PATH (Node.js required for ipfs-car).\n" + " Run inside the pixi environment:\n" + " pixi run -e external-data-upload python " + "Utilities/Maintenance/ExternalDataUpload/upload.py " + ) + + +def main(argv: list[str] | None = None) -> int: + parser = argparse.ArgumentParser( + description=( + "Upload a file to Filebase IPFS storage; produce a .cid content " + "link, update Testing/Data/content-links.manifest, and optionally " + "mirror the bytes into ITKTestingData." + ), + ) + parser.add_argument("file", help="Path to the file to upload") + parser.add_argument( + "--testing-data-repo", + metavar="PATH", + help=( + "Local clone of github.com/InsightSoftwareConsortium/ITKTestingData; " + "files ≤ 50 MB are copied to /CID/ and `git add`ed." 
+ ), + ) + parser.add_argument( + "--bucket", + help="Filebase IPFS bucket (default: $FILEBASE_BUCKET).", + ) + args = parser.parse_args(argv) + + _check_node_available() + abs_path = _validate_input(args.file) + access_key, secret_key, bucket = _credentials(args) + + testing_data_repo: Path | None = None + if args.testing_data_repo: + testing_data_repo = Path(args.testing_data_repo).resolve() + if not testing_data_repo.is_dir(): + sys.exit( + f"ERROR: --testing-data-repo path is not a directory: " + f"{args.testing_data_repo}" + ) + if not (testing_data_repo / ".git").exists(): + sys.exit( + f"ERROR: --testing-data-repo is not a git checkout: " + f"{args.testing_data_repo}" + ) + + rel_path = abs_path.relative_to(REPO_ROOT).as_posix() + print(f"==> Packing {abs_path.name} into a CAR (unixfs-v1-2025 profile)...") + print(f"==> Uploading to Filebase bucket {bucket!r}...") + cid = upload_file_to_filebase(abs_path, bucket, access_key, secret_key) + print(f" CID: {cid}") + + mirrored = False + if testing_data_repo is not None: + mirrored = mirror_to_testing_data(abs_path, cid, testing_data_repo) + + cid_path = abs_path.with_name(abs_path.name + ".cid") + cid_path.write_text(cid + "\n") + abs_path.unlink() + + update_manifest(cid, rel_path) + + rel_cid = cid_path.relative_to(REPO_ROOT).as_posix() + print() + print("==> Upload complete.") + print(f" CID: {cid}") + print(f" Link: {cid_path}") + print() + print("Next steps (ITK repository):") + print(f' git rm "{rel_path}"') + print(f' git add "{rel_cid}"') + print(" git add Testing/Data/content-links.manifest") + if mirrored and testing_data_repo is not None: + print() + print(f"Next steps (ITKTestingData repository at {testing_data_repo}):") + print( + f' git -C "{testing_data_repo}" commit ' + f'-m "Add {abs_path.name} ({cid})"' + ) + print(f' git -C "{testing_data_repo}" push') + return 0 + + +if __name__ == "__main__": + raise SystemExit(main()) diff --git a/pyproject.toml b/pyproject.toml index 
1ab39fd4202..e8b2126ba5c 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,6 +12,11 @@ clean = { cmd = "git clean -fdx", description = "Clean the repository" } [tool.pixi.dependencies] python = ">=3.13.5,<3.14" +[tool.pixi.feature.external-data-upload.dependencies] +boto3 = ">=1.34" +nodejs = ">=20" +requests = ">=2.32" + [tool.pixi.feature.pre-commit.dependencies] pre-commit = ">=4.1.0,<5" @@ -173,3 +178,4 @@ dev = ["dev"] cxx = ["dev", "cxx"] python = ["python", "dev"] pre-commit = ["pre-commit"] +external-data-upload = ["external-data-upload"]